├── .github └── workflows │ └── docker-build-publish.yml ├── .gitignore ├── Dockerfile ├── Gulpfile.js ├── Makefile ├── README.md ├── db ├── localSwitchFilesDB.go ├── persistentDB.go ├── switchTitlesDB.go └── utils.go ├── fileio └── splitFileUtil.go ├── go.mod ├── go.sum ├── main.go ├── package-lock.json ├── package.json ├── pagination └── pagination.go ├── process ├── incompleteTitleProcessor.go ├── organizefolderStructure.go └── organizefolderStructure_test.go ├── resources ├── layout.html ├── pages │ ├── dlc.html │ ├── index.html │ ├── issues.html │ ├── missing.html │ ├── settings.html │ └── updates.html ├── partials │ ├── card.html │ ├── filter.html │ └── pagination.html ├── static │ ├── android-chrome-192x192.png │ ├── android-chrome-512x512.png │ ├── apple-touch-icon.png │ ├── browserconfig.xml │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ ├── favicon.ico │ ├── icon.svg │ ├── mstile-144x144.png │ ├── mstile-150x150.png │ ├── mstile-310x150.png │ ├── mstile-310x310.png │ ├── mstile-70x70.png │ ├── noimage.png │ ├── safari-pinned-tab.svg │ └── site.webmanifest ├── web.js └── web.scss ├── settings ├── keys.go └── settings.go ├── switchfs ├── _crypto │ ├── ecb.go │ └── xts.go ├── cnmt.go ├── fs.go ├── nacp.go ├── nca.go ├── ncaHeader.go ├── nsp.go ├── pfs0.go ├── romfs.go ├── splitFileReader.go └── xci.go └── web ├── api.go ├── dlc.go ├── filter.go ├── http.go ├── img.go ├── index.go ├── issues.go ├── missing.go ├── organize.go ├── resources.go ├── settings.go ├── synchronize.go ├── updates.go └── web.go /.github/workflows/docker-build-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | workflow_dispatch: 8 | 9 | env: 10 | REGISTRY: ghcr.io 11 | IMAGE_NAME: ${{ github.repository }} 12 | 13 | jobs: 14 | 15 | publish: 16 | runs-on: ubuntu-latest 17 | permissions: 18 | contents: read 19 | packages: write 20 | steps: 21 | - name: Checkout 22 | uses: 
actions/checkout@v3 23 | 24 | - name: Set up Node 25 | uses: actions/setup-node@v3 26 | with: 27 | node-version: 18 28 | 29 | - name: Set up Go 30 | uses: actions/setup-go@v4 31 | with: 32 | go-version: '1.18' 33 | 34 | - name: Build application 35 | run: npm install && make gulp build 36 | 37 | - name: Log in to the Container registry 38 | uses: docker/login-action@v2 39 | with: 40 | registry: ${{ env.REGISTRY }} 41 | username: ${{ github.actor }} 42 | password: ${{ secrets.GITHUB_TOKEN }} 43 | 44 | - name: Extract metadata (tags, labels) for Docker 45 | id: meta 46 | uses: docker/metadata-action@v4 47 | with: 48 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 49 | 50 | - name: Build and push Docker image 51 | uses: docker/build-push-action@v4 52 | with: 53 | context: . 54 | push: true 55 | tags: ${{ steps.meta.outputs.tags }} 56 | labels: ${{ steps.meta.outputs.labels }} 57 | 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | build 3 | node_modules 4 | resources/static/web.css 5 | resources/static/web.js 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | RUN apk add --no-cache tzdata 4 | 5 | ENV SLM_DATA_DIR /usr/local/share/switch-library-manager-web 6 | 7 | RUN mkdir -p $SLM_DATA_DIR 8 | 9 | COPY build/switch-library-manager-web /usr/local/bin/switch-library-manager-web 10 | 11 | VOLUME $SLM_DATA_DIR 12 | VOLUME /mnt/roms 13 | 14 | EXPOSE 3000 15 | 16 | CMD ["switch-library-manager-web"] 17 | -------------------------------------------------------------------------------- /Gulpfile.js: -------------------------------------------------------------------------------- 1 | const { dest, series, src, watch } = require('gulp'); 2 | 3 | const clean = 
require('gulp-clean'); 4 | const concat = require('gulp-concat'); 5 | const gulpif = require('gulp-if'); 6 | const jshint = require('gulp-jshint'); 7 | const minify = require('gulp-babel-minify'); 8 | const purgeSourcemaps = require('gulp-purge-sourcemaps'); 9 | const removeEmptyLines = require('gulp-remove-empty-lines'); 10 | const sourcemaps = require('gulp-sourcemaps'); 11 | const sass = require('gulp-sass')(require('sass')); 12 | 13 | function cleanup() { 14 | return src('resources/static/web.css', {"allowEmpty": true}) 15 | .pipe(src('resources/static/web.js', {"allowEmpty": true})) 16 | .pipe(clean()); 17 | } 18 | 19 | function css() { 20 | return src('resources/web.scss') 21 | .pipe(sass({outputStyle: 'compressed'}) 22 | .on('error', sass.logError)) 23 | .pipe(dest('resources/static/')) 24 | } 25 | 26 | function lint() { 27 | return src('resources/web.js') 28 | .pipe(jshint()) 29 | .pipe(jshint.reporter('jshint-stylish')) 30 | .pipe(jshint.reporter('fail')); 31 | } 32 | 33 | function js() { 34 | return src('node_modules/bootstrap/dist/js/bootstrap.bundle.min.js') 35 | .pipe(src('resources/web.js')) 36 | .pipe(gulpif('!**/*.min.js', minify())) 37 | .pipe(sourcemaps.init({loadMaps: true})) 38 | .pipe(purgeSourcemaps()) 39 | .pipe(concat('web.js')) 40 | .pipe(removeEmptyLines()) 41 | .pipe(dest('resources/static/')); 42 | } 43 | 44 | exports.clean = cleanup; 45 | exports.css = css; 46 | exports.js = series(lint, js); 47 | exports.lint = lint; 48 | 49 | exports.watch = function() { 50 | watch('resources/web.scss', css); 51 | watch('resources/web.js', series(lint, js)); 52 | }; 53 | 54 | exports.default = series(cleanup, css, lint, js) 55 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: clean gulp build 2 | 3 | build: 4 | GOOS=linux CGO_ENABLED=0 go build -o build/switch-library-manager-web main.go 5 | 6 | clean: 7 | rm -rf build 
|| true 8 | gulp clean 9 | 10 | gulp: 11 | gulp 12 | 13 | run: 14 | go run main.go 15 | 16 | watch: 17 | gulp watch 18 | 19 | .PHONY: build 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Switch Library Manager Web 3 | Easily manage your switch game backups 4 | 5 | #### Features: 6 | - Cross platform, works on Windows / Mac / Linux 7 | - Web interface 8 | - Scan your local switch backup library (NSP/NSZ/XCI) 9 | - Read titleId/version by decrypting NSP/XCI/NSZ (requires prod.keys) 10 | - If no prod.keys present, fallback to read titleId/version by parsing file name (example: `Super Mario Odyssey [0100000000010000][v0].nsp`). 11 | - Lists missing update files (for games and DLC) 12 | - Lists missing DLCs 13 | - ~~Automatically organize games per folder~~ Not yet done 14 | - ~~Rename files based on metadata read from NSP~~ Not yet done 15 | - ~~Delete old update files (in case you have multiple update files for the same game, only the latest will remain)~~ Not yet done 16 | - ~~Delete empty folders~~ Not yet done 17 | - Zero dependencies, all crypto operations implemented in Go. 18 | 19 | ## Keys (optional) 20 | Having a prod.keys file will allow you to ensure the files you have a correctly classified. 21 | The app will look for the "prod.keys" file in the data folder. 22 | You can also specify a custom location in the settings page. 23 | 24 | Note: Only the header_key, and the key_area_key_application_XX keys are required. 
25 | 26 | ## Naming template 27 | The following template elements are supported: 28 | - {TITLE_NAME} - game name 29 | - {TITLE_ID} - title id 30 | - {VERSION} - version id (only applicable to files) 31 | - {VERSION_TXT} - version number (like 1.0.0) (only applicable to files) 32 | - {REGION} - region 33 | - {TYPE} - impacts DLCs/updates, will appear as ["UPD","DLC"] 34 | - {DLC_NAME} - DLC name (only applicable to DLCs) 35 | 36 | ## Reporting issues 37 | Please set debug mode to 'true', and attach the docker log to allow for quicker resolution. 38 | 39 | ## Usage 40 | 41 | Please refer to the Docker documentation on how to run a Docker image. 42 | 43 | ##### Volumes inside the container 44 | - `/usr/local/share/switch-library-manager-web` 45 | - `/mnt/roms` 46 | 47 | ##### Example 48 | ``` 49 | $ docker run -d \ 50 | --name switch-library-manager-web \ 51 | -v /home/johndoe/switch-library-manager-web:/usr/local/share/switch-library-manager-web:Z \ 52 | -v /home/johndoe/Backups/Switch:/mnt/roms:Z \ 53 | -p 3000:3000 \ 54 | ghcr.io/dtrunk90/switch-library-manager-web 55 | ``` 56 | 57 | ## Building 58 | - Install and setup [Gulp](https://gulpjs.com) 59 | - Install and setup [Go](https://go.dev) 60 | - Clone the repo: `git clone https://github.com/dtrunk90/switch-library-manager-web.git` 61 | - Execute `make` 62 | - Binary will be available under build 63 | 64 | #### Thanks 65 | This program is based on [giwty's switch-library-manager](https://github.com/giwty/switch-library-manager) 66 | 67 | This program relies on [blawar's titledb](https://github.com/blawar/titledb) to get the latest titles and versions. 
68 | 69 | -------------------------------------------------------------------------------- /db/localSwitchFilesDB.go: -------------------------------------------------------------------------------- 1 | package db 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "github.com/dtrunk90/switch-library-manager-web/fileio" 7 | "github.com/dtrunk90/switch-library-manager-web/settings" 8 | "github.com/dtrunk90/switch-library-manager-web/switchfs" 9 | "go.uber.org/zap" 10 | "os" 11 | "path/filepath" 12 | "regexp" 13 | "strconv" 14 | "strings" 15 | ) 16 | 17 | var ( 18 | versionRegex = regexp.MustCompile(`\[[vV]?(?P[0-9]{1,10})]`) 19 | titleIdRegex = regexp.MustCompile(`\[(?P[A-Z,a-z0-9]{16})]`) 20 | ) 21 | 22 | const ( 23 | DB_TABLE_FILE_SCAN_METADATA = "deep-scan" 24 | DB_TABLE_LOCAL_LIBRARY = "local-library" 25 | 26 | REASON_UNSUPPORTED_TYPE = iota 27 | REASON_DUPLICATE 28 | REASON_OLD_UPDATE 29 | REASON_UNRECOGNISED 30 | REASON_MALFORMED_FILE 31 | ) 32 | 33 | type LocalSwitchDBManager struct { 34 | db *PersistentDB 35 | } 36 | 37 | func NewLocalSwitchDBManager(dataFolder string) (*LocalSwitchDBManager, error) { 38 | db, err := NewPersistentDB(dataFolder) 39 | if err != nil { 40 | return nil, err 41 | } 42 | return &LocalSwitchDBManager{db: db}, nil 43 | } 44 | 45 | func (ldb *LocalSwitchDBManager) Close() { 46 | ldb.db.Close() 47 | } 48 | 49 | type ExtendedFileInfo struct { 50 | FileName string 51 | BaseFolder string 52 | Size int64 53 | IsDir bool 54 | } 55 | 56 | type SwitchFileInfo struct { 57 | ExtendedInfo ExtendedFileInfo 58 | Metadata *switchfs.ContentMetaAttributes 59 | } 60 | 61 | type SwitchGameFiles struct { 62 | File SwitchFileInfo 63 | BaseExist bool 64 | Updates map[int]SwitchFileInfo 65 | Dlc map[string]SwitchFileInfo 66 | MultiContent bool 67 | LatestUpdate int 68 | IsSplit bool 69 | Icon string 70 | Banner string 71 | } 72 | 73 | type SkippedFile struct { 74 | ReasonCode int 75 | ReasonText string 76 | AdditionalInfo string 77 | } 78 | 79 | type 
LocalSwitchFilesDB struct { 80 | TitlesMap map[string]*SwitchGameFiles 81 | Skipped map[ExtendedFileInfo]SkippedFile 82 | NumFiles int 83 | } 84 | 85 | func (ldb *LocalSwitchDBManager) CreateLocalSwitchFilesDB(switchDB *SwitchTitlesDB, dataFolder string, 86 | folders []string, progress ProgressUpdater, recursive bool, ignoreCache bool) (*LocalSwitchFilesDB, error) { 87 | 88 | titles := map[string]*SwitchGameFiles{} 89 | skipped := map[ExtendedFileInfo]SkippedFile{} 90 | files := []ExtendedFileInfo{} 91 | 92 | if !ignoreCache { 93 | ldb.db.GetEntry(DB_TABLE_LOCAL_LIBRARY, "files", &files) 94 | ldb.db.GetEntry(DB_TABLE_LOCAL_LIBRARY, "skipped", &skipped) 95 | ldb.db.GetEntry(DB_TABLE_LOCAL_LIBRARY, "titles", &titles) 96 | } 97 | 98 | if len(titles) == 0 { 99 | 100 | for i, folder := range folders { 101 | err := scanFolder(folder, recursive, &files, progress) 102 | if progress != nil { 103 | progress.UpdateProgress(i+1, len(folders)+1, "scanning files in "+folder) 104 | } 105 | if err != nil { 106 | continue 107 | } 108 | } 109 | 110 | ldb.processLocalFiles(switchDB, dataFolder, files, progress, titles, skipped) 111 | 112 | ldb.db.AddEntry(DB_TABLE_LOCAL_LIBRARY, "files", files) 113 | ldb.db.AddEntry(DB_TABLE_LOCAL_LIBRARY, "skipped", skipped) 114 | ldb.db.AddEntry(DB_TABLE_LOCAL_LIBRARY, "titles", titles) 115 | } 116 | 117 | if progress != nil { 118 | progress.UpdateProgress(len(files), len(files), "Complete") 119 | } 120 | 121 | return &LocalSwitchFilesDB{TitlesMap: titles, Skipped: skipped, NumFiles: len(files)}, nil 122 | } 123 | 124 | func scanFolder(folder string, recursive bool, files *[]ExtendedFileInfo, progress ProgressUpdater) error { 125 | filepath.Walk(folder, func(path string, info os.FileInfo, err error) error { 126 | if path == folder { 127 | return nil 128 | } 129 | if err != nil { 130 | zap.S().Error("Error while scanning folders", err) 131 | return nil 132 | } 133 | 134 | if info.IsDir() { 135 | return nil 136 | } 137 | 138 | //skip mac hidden files 
139 | if info.Name()[0:1] == "." { 140 | return nil 141 | } 142 | base := path[0 : len(path)-len(info.Name())] 143 | if strings.TrimSuffix(base, string(os.PathSeparator)) != strings.TrimSuffix(folder, string(os.PathSeparator)) && 144 | !recursive { 145 | return nil 146 | } 147 | if progress != nil { 148 | progress.UpdateProgress(-1, -1, "scanning "+info.Name()) 149 | } 150 | *files = append(*files, ExtendedFileInfo{FileName: info.Name(), BaseFolder: base, Size: info.Size(), IsDir: info.IsDir()}) 151 | 152 | return nil 153 | }) 154 | return nil 155 | } 156 | 157 | func (ldb *LocalSwitchDBManager) ClearScanData() error { 158 | return ldb.db.ClearTable(DB_TABLE_FILE_SCAN_METADATA) 159 | } 160 | 161 | func (ldb *LocalSwitchDBManager) processLocalFiles(switchDB *SwitchTitlesDB, dataFolder string, 162 | files []ExtendedFileInfo, 163 | progress ProgressUpdater, 164 | titles map[string]*SwitchGameFiles, 165 | skipped map[ExtendedFileInfo]SkippedFile) { 166 | ind := 0 167 | total := len(files) 168 | for _, file := range files { 169 | ind += 1 170 | if progress != nil { 171 | progress.UpdateProgress(ind, total, "process:"+file.FileName) 172 | } 173 | 174 | //scan sub-folders if flag is present 175 | filePath := filepath.Join(file.BaseFolder, file.FileName) 176 | if file.IsDir { 177 | continue 178 | } 179 | 180 | fileName := strings.ToLower(file.FileName) 181 | isSplit := false 182 | 183 | if partNum, err := strconv.Atoi(fileName[len(fileName)-2:]); err == nil { 184 | if partNum == 0 { 185 | isSplit = true 186 | } else { 187 | continue 188 | } 189 | 190 | } 191 | 192 | //only handle NSZ and NSP files 193 | 194 | if !isSplit && 195 | !strings.HasSuffix(fileName, "xci") && 196 | !strings.HasSuffix(fileName, "nsp") && 197 | !strings.HasSuffix(fileName, "nsz") && 198 | !strings.HasSuffix(fileName, "xcz") { 199 | skipped[file] = SkippedFile{ReasonCode: REASON_UNSUPPORTED_TYPE, ReasonText: "file type is not supported"} 200 | continue 201 | } 202 | 203 | contentMap, err := 
ldb.getGameMetadata(file, filePath, skipped) 204 | 205 | if err != nil { 206 | if _, ok := skipped[file]; !ok { 207 | skipped[file] = SkippedFile{ReasonText: "unable to determine title-Id / version - " + err.Error(), ReasonCode: REASON_UNRECOGNISED} 208 | } 209 | continue 210 | } 211 | 212 | for _, metadata := range contentMap { 213 | 214 | idPrefix := metadata.TitleId[0 : len(metadata.TitleId)-4] 215 | 216 | multiContent := len(contentMap) > 1 217 | switchTitle := &SwitchGameFiles{ 218 | MultiContent: multiContent, 219 | Updates: map[int]SwitchFileInfo{}, 220 | Dlc: map[string]SwitchFileInfo{}, 221 | BaseExist: false, 222 | IsSplit: isSplit, 223 | LatestUpdate: 0, 224 | } 225 | if t, ok := titles[idPrefix]; ok { 226 | switchTitle = t 227 | } 228 | titles[idPrefix] = switchTitle 229 | 230 | //process Updates 231 | if strings.HasSuffix(metadata.TitleId, "800") { 232 | metadata.Type = "Update" 233 | 234 | if update, ok := switchTitle.Updates[metadata.Version]; ok { 235 | skipped[file] = SkippedFile{ReasonCode: REASON_DUPLICATE, ReasonText: "duplicate update file (" + update.ExtendedInfo.FileName + ")"} 236 | zap.S().Warnf("-->Duplicate update file found [%v] and [%v]", update.ExtendedInfo.FileName, file.FileName) 237 | continue 238 | } 239 | switchTitle.Updates[metadata.Version] = SwitchFileInfo{ExtendedInfo: file, Metadata: metadata} 240 | if metadata.Version > switchTitle.LatestUpdate { 241 | if switchTitle.LatestUpdate != 0 { 242 | skipped[switchTitle.Updates[switchTitle.LatestUpdate].ExtendedInfo] = SkippedFile{ReasonCode: REASON_OLD_UPDATE, ReasonText: "old update file, newer update exist locally"} 243 | } 244 | switchTitle.LatestUpdate = metadata.Version 245 | } else { 246 | skipped[file] = SkippedFile{ReasonCode: REASON_OLD_UPDATE, ReasonText: "old update file, newer update exist locally"} 247 | } 248 | continue 249 | } 250 | 251 | //process base 252 | if strings.HasSuffix(metadata.TitleId, "000") { 253 | metadata.Type = "Base" 254 | if switchTitle.BaseExist { 
255 | skipped[file] = SkippedFile{ReasonCode: REASON_DUPLICATE, ReasonText: "duplicate base file (" + switchTitle.File.ExtendedInfo.FileName + ")"} 256 | zap.S().Warnf("-->Duplicate base file found [%v] and [%v]", file.FileName, switchTitle.File.ExtendedInfo.FileName) 257 | continue 258 | } 259 | switchTitle.File = SwitchFileInfo{ExtendedInfo: file, Metadata: metadata} 260 | switchTitle.BaseExist = true 261 | 262 | if title, ok := switchDB.TitlesMap[idPrefix]; ok { 263 | if title.Attributes.IconUrl != "" { 264 | basename := filepath.Base(title.Attributes.IconUrl) 265 | filename := filepath.Join(dataFolder, "img", basename) 266 | if err := DownloadFile(title.Attributes.IconUrl, filename); err == nil { 267 | switchTitle.Icon = basename 268 | } 269 | } 270 | if title.Attributes.BannerUrl != "" { 271 | basename := filepath.Base(title.Attributes.BannerUrl) 272 | filename := filepath.Join(dataFolder, "img", basename) 273 | if err := DownloadFile(title.Attributes.BannerUrl, filename); err == nil { 274 | switchTitle.Banner = basename 275 | } 276 | } 277 | } 278 | 279 | continue 280 | } 281 | 282 | if dlc, ok := switchTitle.Dlc[metadata.TitleId]; ok { 283 | if metadata.Version < dlc.Metadata.Version { 284 | skipped[file] = SkippedFile{ReasonCode: REASON_OLD_UPDATE, ReasonText: "old DLC file, newer version exist locally"} 285 | zap.S().Warnf("-->Old DLC file found [%v] and [%v]", file.FileName, dlc.ExtendedInfo.FileName) 286 | continue 287 | } else if metadata.Version == dlc.Metadata.Version { 288 | skipped[file] = SkippedFile{ReasonCode: REASON_DUPLICATE, ReasonText: "duplicate DLC file (" + dlc.ExtendedInfo.FileName + ")"} 289 | zap.S().Warnf("-->Duplicate DLC file found [%v] and [%v]", file.FileName, dlc.ExtendedInfo.FileName) 290 | continue 291 | } 292 | } 293 | //not an update, and not main TitleAttributes, so treat it as a DLC 294 | metadata.Type = "DLC" 295 | switchTitle.Dlc[metadata.TitleId] = SwitchFileInfo{ExtendedInfo: file, Metadata: metadata} 296 | } 297 | } 298 
| 299 | } 300 | 301 | func (ldb *LocalSwitchDBManager) getGameMetadata(file ExtendedFileInfo, 302 | filePath string, 303 | skipped map[ExtendedFileInfo]SkippedFile) (map[string]*switchfs.ContentMetaAttributes, error) { 304 | 305 | var metadata map[string]*switchfs.ContentMetaAttributes = nil 306 | keys, _ := settings.SwitchKeys() 307 | var err error 308 | fileKey := filePath + "|" + file.FileName + "|" + strconv.Itoa(int(file.Size)) 309 | if keys != nil && keys.GetKey("header_key") != "" { 310 | err = ldb.db.GetEntry(DB_TABLE_FILE_SCAN_METADATA, fileKey, &metadata) 311 | 312 | if err != nil { 313 | zap.S().Warnf("%v", err) 314 | } 315 | 316 | if metadata != nil { 317 | return metadata, nil 318 | } 319 | 320 | fileName := strings.ToLower(file.FileName) 321 | if strings.HasSuffix(fileName, "nsp") || 322 | strings.HasSuffix(fileName, "nsz") { 323 | metadata, err = switchfs.ReadNspMetadata(filePath) 324 | if err != nil { 325 | skipped[file] = SkippedFile{ReasonCode: REASON_MALFORMED_FILE, ReasonText: fmt.Sprintf("failed to read NSP [reason: %v]", err)} 326 | zap.S().Errorf("[file:%v] failed to read NSP [reason: %v]\n", file.FileName, err) 327 | } 328 | } else if strings.HasSuffix(fileName, "xci") || 329 | strings.HasSuffix(fileName, "xcz") { 330 | metadata, err = switchfs.ReadXciMetadata(filePath) 331 | if err != nil { 332 | skipped[file] = SkippedFile{ReasonCode: REASON_MALFORMED_FILE, ReasonText: fmt.Sprintf("failed to read NSP [reason: %v]", err)} 333 | zap.S().Errorf("[file:%v] failed to read file [reason: %v]\n", file.FileName, err) 334 | } 335 | } else if strings.HasSuffix(fileName, "00") { 336 | metadata, err = fileio.ReadSplitFileMetadata(filePath) 337 | if err != nil { 338 | skipped[file] = SkippedFile{ReasonCode: REASON_MALFORMED_FILE, ReasonText: fmt.Sprintf("failed to read split files [reason: %v]", err)} 339 | zap.S().Errorf("[file:%v] failed to read NSP [reason: %v]\n", file.FileName, err) 340 | } 341 | } 342 | } 343 | 344 | if metadata != nil { 345 | err 
= ldb.db.AddEntry(DB_TABLE_FILE_SCAN_METADATA, fileKey, metadata) 346 | 347 | if err != nil { 348 | zap.S().Warnf("%v", err) 349 | } 350 | return metadata, nil 351 | } 352 | 353 | //fallback to parse data from filename 354 | 355 | //parse title id 356 | titleId, _ := parseTitleIdFromFileName(file.FileName) 357 | version, _ := parseVersionFromFileName(file.FileName) 358 | 359 | if titleId == nil || version == nil { 360 | return nil, errors.New("unable to determine titileId / version") 361 | } 362 | metadata = map[string]*switchfs.ContentMetaAttributes{} 363 | metadata[*titleId] = &switchfs.ContentMetaAttributes{TitleId: *titleId, Version: *version} 364 | 365 | return metadata, nil 366 | } 367 | 368 | func parseVersionFromFileName(fileName string) (*int, error) { 369 | res := versionRegex.FindStringSubmatch(fileName) 370 | if len(res) != 2 { 371 | return nil, errors.New("failed to parse name - no version id found") 372 | } 373 | ver, err := strconv.Atoi(res[1]) 374 | if err != nil { 375 | return nil, errors.New("failed to parse name - no version id found") 376 | } 377 | return &ver, nil 378 | } 379 | 380 | func parseTitleIdFromFileName(fileName string) (*string, error) { 381 | res := titleIdRegex.FindStringSubmatch(fileName) 382 | 383 | if len(res) != 2 { 384 | return nil, errors.New("failed to parse name - no title id found") 385 | } 386 | titleId := strings.ToLower(res[1]) 387 | return &titleId, nil 388 | } 389 | 390 | func ParseTitleNameFromFileName(fileName string) string { 391 | ind := strings.Index(fileName, "[") 392 | if ind != -1 { 393 | return fileName[:ind] 394 | } 395 | return fileName 396 | } 397 | -------------------------------------------------------------------------------- /db/persistentDB.go: -------------------------------------------------------------------------------- 1 | package db 2 | 3 | import ( 4 | "bytes" 5 | "encoding/gob" 6 | "fmt" 7 | "github.com/boltdb/bolt" 8 | "github.com/dtrunk90/switch-library-manager-web/settings" 9 | 
"go.uber.org/zap" 10 | "log" 11 | "path/filepath" 12 | ) 13 | 14 | const ( 15 | DB_INTERNAL_TABLENAME = "internal-metadata" 16 | ) 17 | 18 | type PersistentDB struct { 19 | db *bolt.DB 20 | } 21 | 22 | func NewPersistentDB(dataFolder string) (*PersistentDB, error) { 23 | // Open the my.db data file in your current directory. 24 | // It will be created if it doesn't exist. 25 | db, err := bolt.Open(filepath.Join(dataFolder, "slm.db"), 0644, &bolt.Options{Timeout: 1 * 60}) 26 | if err != nil { 27 | log.Fatal(err) 28 | return nil, err 29 | } 30 | 31 | //set DB version 32 | err = db.View(func(tx *bolt.Tx) error { 33 | b := tx.Bucket([]byte(DB_INTERNAL_TABLENAME)) 34 | if b == nil { 35 | b, err := tx.CreateBucket([]byte(DB_INTERNAL_TABLENAME)) 36 | if b == nil || err != nil { 37 | return fmt.Errorf("create bucket: %s", err) 38 | } 39 | err = b.Put([]byte("app_version"), []byte(settings.SLM_VERSION)) 40 | if err != nil { 41 | zap.S().Warnf("failed to save app_version - %v", err) 42 | return err 43 | } 44 | } 45 | return nil 46 | }) 47 | 48 | return &PersistentDB{db: db}, nil 49 | } 50 | 51 | func (pd *PersistentDB) Close() { 52 | pd.db.Close() 53 | } 54 | 55 | func (pd *PersistentDB) ClearTable(tableName string) error { 56 | err := pd.db.Update(func(tx *bolt.Tx) error { 57 | err := tx.DeleteBucket([]byte(tableName)) 58 | return err 59 | }) 60 | return err 61 | } 62 | 63 | func (pd *PersistentDB) AddEntry(tableName string, key string, value interface{}) error { 64 | var err error 65 | err = pd.db.Update(func(tx *bolt.Tx) error { 66 | b := tx.Bucket([]byte(tableName)) 67 | if b == nil { 68 | b, err = tx.CreateBucket([]byte(tableName)) 69 | if b == nil || err != nil { 70 | return fmt.Errorf("create bucket: %s", err) 71 | } 72 | } 73 | var bytesBuff bytes.Buffer 74 | encoder := gob.NewEncoder(&bytesBuff) 75 | err := encoder.Encode(value) 76 | if err != nil { 77 | return err 78 | } 79 | err = b.Put([]byte(key), bytesBuff.Bytes()) 80 | return err 81 | }) 82 | return err 83 | } 
84 | 85 | func (pd *PersistentDB) GetEntry(tableName string, key string, value interface{}) error { 86 | err := pd.db.View(func(tx *bolt.Tx) error { 87 | 88 | b := tx.Bucket([]byte(tableName)) 89 | if b == nil { 90 | return nil 91 | } 92 | v := b.Get([]byte(key)) 93 | if v == nil { 94 | return nil 95 | } 96 | d := gob.NewDecoder(bytes.NewReader(v)) 97 | 98 | // Decoding the serialized data 99 | err := d.Decode(value) 100 | if err != nil { 101 | return err 102 | } 103 | return nil 104 | }) 105 | return err 106 | } 107 | 108 | /*func (pd *PersistentDB) GetEntries() (map[string]*switchfs.ContentMetaAttributes, error) { 109 | pd.db.View(func(tx *bolt.Tx) error { 110 | // Assume bucket exists and has keys 111 | b := tx.Bucket([]byte(METADATA_TABLENAME)) 112 | 113 | c := b.Cursor() 114 | 115 | for k, v := c.First(); k != nil; k, v = c.Next() { 116 | fmt.Printf("key=%s, value=%s\n", k, v) 117 | } 118 | 119 | return nil 120 | }) 121 | }*/ 122 | -------------------------------------------------------------------------------- /db/switchTitlesDB.go: -------------------------------------------------------------------------------- 1 | package db 2 | 3 | import ( 4 | "encoding/json" 5 | "io" 6 | "strings" 7 | ) 8 | 9 | type TitleAttributes struct { 10 | Id string `json:"id"` 11 | Name string `json:"name,omitempty"` 12 | Version json.Number `json:"version,omitempty"` 13 | Region string `json:"region,omitempty"` 14 | ReleaseDate int `json:"releaseDate,omitempty"` 15 | Publisher string `json:"publisher,omitempty"` 16 | IconUrl string `json:"iconUrl,omitempty"` 17 | Screenshots []string `json:"screenshots,omitempty"` 18 | BannerUrl string `json:"bannerUrl,omitempty"` 19 | Description string `json:"description,omitempty"` 20 | Size int `json:"size,omitempty"` 21 | } 22 | 23 | type SwitchTitle struct { 24 | Attributes TitleAttributes 25 | Updates map[int]string 26 | Dlc map[string]TitleAttributes 27 | } 28 | 29 | type SwitchTitlesDB struct { 30 | TitlesMap map[string]*SwitchTitle 31 | 
} 32 | 33 | func CreateSwitchTitleDB(titlesFile, versionsFile io.Reader) (*SwitchTitlesDB, error) { 34 | //parse the titles objects 35 | var titles = map[string]TitleAttributes{} 36 | err := decodeToJsonObject(titlesFile, &titles) 37 | if err != nil { 38 | return nil, err 39 | } 40 | 41 | //parse the titles objects 42 | //titleID -> versionId-> release date 43 | var versions = map[string]map[int]string{} 44 | err = decodeToJsonObject(versionsFile, &versions) 45 | if err != nil { 46 | return nil, err 47 | } 48 | 49 | result := SwitchTitlesDB{TitlesMap: map[string]*SwitchTitle{}} 50 | for id, attr := range titles { 51 | id = strings.ToLower(id) 52 | 53 | //TitleAttributes id rules: 54 | //main TitleAttributes ends with 000 55 | //Updates ends with 800 56 | //Dlc have a running counter (starting with 001) in the 4 last chars 57 | idPrefix := id[0 : len(id)-4] 58 | switchTitle := &SwitchTitle{Dlc: map[string]TitleAttributes{}} 59 | if t, ok := result.TitlesMap[idPrefix]; ok { 60 | switchTitle = t 61 | } 62 | result.TitlesMap[idPrefix] = switchTitle 63 | 64 | //process Updates 65 | if strings.HasSuffix(id, "800") { 66 | updates := versions[id[0:len(id)-3]+"000"] 67 | switchTitle.Updates = updates 68 | continue 69 | } 70 | 71 | //process main TitleAttributes 72 | if strings.HasSuffix(id, "000") { 73 | switchTitle.Attributes = attr 74 | continue 75 | } 76 | 77 | //not an update, and not main TitleAttributes, so treat it as a DLC 78 | switchTitle.Dlc[id] = attr 79 | 80 | } 81 | 82 | return &result, nil 83 | } 84 | -------------------------------------------------------------------------------- /db/utils.go: -------------------------------------------------------------------------------- 1 | package db 2 | 3 | import ( 4 | bytes2 "bytes" 5 | "encoding/json" 6 | "errors" 7 | "go.uber.org/zap" 8 | "io" 9 | "io/ioutil" 10 | "net" 11 | "net/http" 12 | "os" 13 | "time" 14 | ) 15 | 16 | type ProgressUpdater interface { 17 | UpdateProgress(curr int, total int, message string) 18 | 
} 19 | 20 | func LoadAndUpdateFile(url string, filePath string, etag string) (*os.File, string, error) { 21 | 22 | //create file if not exist 23 | if _, err := os.Stat(filePath); os.IsNotExist(err) { 24 | _, err = os.Create(filePath) 25 | if err != nil { 26 | zap.S().Errorf("Failed to create file %v - %v\n", filePath, err) 27 | return nil, "", err 28 | } 29 | } 30 | 31 | var file *os.File = nil 32 | 33 | //try to check if there is a new version 34 | //if so, save the file 35 | bytes, newEtag, err := downloadBytesFromUrl(url, etag) 36 | if err == nil { 37 | //validate json structure 38 | var test map[string]interface{} 39 | err = decodeToJsonObject(bytes2.NewReader(bytes), &test) 40 | if err == nil { 41 | file, err = saveFile(bytes, filePath) 42 | etag = newEtag 43 | } else { 44 | zap.S().Infof("ignoring new update [%v], reason - [mailformed json file]", url) 45 | } 46 | } else { 47 | zap.S().Infof("file [%v] was not downloaded, reason - [%v]", url, err) 48 | } 49 | 50 | if file == nil { 51 | //load file 52 | file, err = os.Open(filePath) 53 | if err != nil { 54 | zap.S().Infof("ignoring new update [%v], reason - [mailformed json file]", url) 55 | return nil, "", err 56 | } 57 | 58 | fileInfo, err := os.Stat(filePath) 59 | if err != nil || fileInfo.Size() == 0 { 60 | zap.S().Infof("Local file is empty, or corrupted") 61 | return nil, "", errors.New("unable to download switch titles db") 62 | } 63 | } 64 | 65 | return file, etag, err 66 | } 67 | 68 | func decodeToJsonObject(reader io.Reader, target interface{}) error { 69 | err := json.NewDecoder(reader).Decode(target) 70 | return err 71 | } 72 | 73 | func downloadBytesFromUrl(url string, etag string) ([]byte, string, error) { 74 | req, err := http.NewRequest("GET", url, nil) 75 | if err != nil { 76 | return nil, "", err 77 | } 78 | req.Header.Set("If-None-Match", etag) 79 | transport := &http.Transport{ 80 | DialContext: (&net.Dialer{ 81 | Timeout: 3 * time.Second, 82 | }).DialContext, 83 | } 84 | client := 
http.Client{ 85 | Transport: transport, 86 | } 87 | resp, err := client.Do(req) 88 | if err != nil { 89 | return nil, "", err 90 | } 91 | 92 | if resp.StatusCode >= 400 { 93 | return nil, "", errors.New("got a non 200 response - " + resp.Status) 94 | } 95 | defer resp.Body.Close() 96 | //getting the new etag 97 | etag = resp.Header.Get("Etag") 98 | 99 | if resp.StatusCode == http.StatusOK { 100 | body, err := ioutil.ReadAll(resp.Body) 101 | if err != nil { 102 | return nil, "", err 103 | } 104 | return body, etag, nil 105 | } 106 | 107 | return nil, "", errors.New("no new updates") 108 | } 109 | 110 | func saveFile(bytes []byte, fileName string) (*os.File, error) { 111 | 112 | err := ioutil.WriteFile(fileName, bytes, 0644) 113 | if err != nil { 114 | return nil, err 115 | } 116 | 117 | file, err := os.Open(fileName) 118 | if err != nil { 119 | return nil, err 120 | } 121 | return file, nil 122 | } 123 | 124 | 125 | 126 | func DownloadFile(url string, filePath string) error { 127 | //create file if not exist 128 | if _, err := os.Stat(filePath); os.IsNotExist(err) { 129 | _, err = os.Create(filePath) 130 | if err != nil { 131 | zap.S().Errorf("Failed to create file %v - %v\n", filePath, err) 132 | return err 133 | } 134 | } 135 | 136 | bytes, _, err := downloadBytesFromUrl(url, "") 137 | if err == nil { 138 | _, err = saveFile(bytes, filePath) 139 | } else { 140 | zap.S().Infof("file [%v] was not downloaded, reason - [%v]", url, err) 141 | } 142 | 143 | return err 144 | } 145 | -------------------------------------------------------------------------------- /fileio/splitFileUtil.go: -------------------------------------------------------------------------------- 1 | package fileio 2 | 3 | import ( 4 | "errors" 5 | "github.com/dtrunk90/switch-library-manager-web/switchfs" 6 | "os" 7 | ) 8 | 9 | func ReadSplitFileMetadata(filePath string) (map[string]*switchfs.ContentMetaAttributes, error) { 10 | //check if this is a NS* or XC* file 11 | _, err := 
switchfs.ReadPfs0File(filePath) 12 | isXCI := false 13 | if err != nil { 14 | _, err = readXciHeader(filePath) 15 | if err != nil { 16 | return nil, errors.New("split file is not an XCI/XCZ or NSP/NSZ") 17 | } 18 | isXCI = true 19 | } 20 | 21 | if isXCI { 22 | return switchfs.ReadXciMetadata(filePath) 23 | } else { 24 | return switchfs.ReadNspMetadata(filePath) 25 | } 26 | } 27 | 28 | func readXciHeader(filePath string) ([]byte, error) { 29 | file, err := os.Open(filePath) 30 | if err != nil { 31 | return nil, err 32 | } 33 | 34 | defer file.Close() 35 | 36 | header := make([]byte, 0x200) 37 | _, err = file.Read(header) 38 | if err != nil { 39 | return nil, err 40 | } 41 | 42 | if string(header[0x100:0x104]) != "HEAD" { 43 | return nil, errors.New("not an XCI/XCZ file") 44 | } 45 | return header, nil 46 | } 47 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/dtrunk90/switch-library-manager-web 2 | 3 | go 1.18 4 | 5 | require ( 6 | github.com/avast/retry-go v2.6.1+incompatible 7 | github.com/boltdb/bolt v1.3.1 8 | github.com/ggicci/httpin v0.11.0 9 | github.com/justinas/alice v1.2.0 10 | github.com/magiconair/properties v1.8.1 11 | go.uber.org/zap v1.16.0 12 | robpike.io/nihongo v0.0.0-20200511095354-a985f0929cfa 13 | ) 14 | 15 | require ( 16 | github.com/disintegration/gift v1.2.0 // indirect 17 | github.com/gorilla/mux v1.8.0 // indirect 18 | github.com/pierrre/imageserver v0.0.0-20231013080238-eaaa7dc7d331 // indirect 19 | github.com/pierrre/imageutil v1.0.0 // indirect 20 | github.com/stretchr/testify v1.5.1 // indirect 21 | go.uber.org/atomic v1.6.0 // indirect 22 | go.uber.org/multierr v1.5.0 // indirect 23 | golang.org/x/sys v0.13.0 // indirect 24 | gopkg.in/yaml.v2 v2.4.0 // indirect 25 | ) 26 | -------------------------------------------------------------------------------- /go.sum: 
-------------------------------------------------------------------------------- 1 | github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= 2 | github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= 3 | github.com/avast/retry-go v2.6.1+incompatible h1:quvLI98pOPWtTq7xnbX4TI5l9PmRJooM2AI1T7mOFUA= 4 | github.com/avast/retry-go v2.6.1+incompatible/go.mod h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY= 5 | github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4= 6 | github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= 7 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 8 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 9 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 10 | github.com/disintegration/gift v1.2.0 h1:VMQeei2F+ZtsHjMgP6Sdt1kFjRhs2lGz8ljEOPeIR50= 11 | github.com/disintegration/gift v1.2.0/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI= 12 | github.com/ggicci/httpin v0.11.0 h1:qJmMiFU06YU8k+ZPEoMwUdzxBqzW3WetOCCZj01GAPw= 13 | github.com/ggicci/httpin v0.11.0/go.mod h1:syzWknMH1AVLXb5yJlYV53J3lIIHDnABHpm+y92dYSQ= 14 | github.com/go-chi/chi/v5 v5.0.8 h1:lD+NLqFcAi1ovnVZpsnObHGW4xb4J8lNmoYVfECH1Y0= 15 | github.com/go-chi/chi/v5 v5.0.8/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= 16 | github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= 17 | github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= 18 | github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= 19 | github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= 20 | github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= 21 | github.com/jtolds/gls 
v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= 22 | github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= 23 | github.com/justinas/alice v1.2.0 h1:+MHSA/vccVCF4Uq37S42jwlkvI2Xzl7zTPCN5BnZNVo= 24 | github.com/justinas/alice v1.2.0/go.mod h1:fN5HRH/reO/zrUflLfTN43t3vXvKzvZIENsNEe7i7qA= 25 | github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 26 | github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= 27 | github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= 28 | github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= 29 | github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= 30 | github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= 31 | github.com/pierrre/imageserver v0.0.0-20231013080238-eaaa7dc7d331 h1:ApZeRWR6Fj6cqoNf8/KO+nfawuwzxWj8Y4ENzV4lfus= 32 | github.com/pierrre/imageserver v0.0.0-20231013080238-eaaa7dc7d331/go.mod h1:8StjKPKRxPZkzAM7QmO0/q3QPtTgA/qpCUECs5Pq1i0= 33 | github.com/pierrre/imageutil v1.0.0 h1:/DqwfUW34DdeZ/+btQRAD1NNIlLs+rAkskJJv5+jx2Q= 34 | github.com/pierrre/imageutil v1.0.0/go.mod h1:7NQKvBWOPV2rUECRLS1xs/w1l1Dn6r5dn4f3mrz5SQg= 35 | github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= 36 | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 37 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 38 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 39 | github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= 40 | github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= 41 | github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= 42 | 
github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg= 43 | github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM= 44 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 45 | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= 46 | github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 47 | github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= 48 | github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= 49 | go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk= 50 | go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= 51 | go.uber.org/multierr v1.5.0 h1:KCa4XfM8CWFCpxXRGok+Q0SS/0XBhMDbHHGABQLvD2A= 52 | go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= 53 | go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4= 54 | go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= 55 | go.uber.org/zap v1.16.0 h1:uFRZXykJGK9lLY4HtgSw44DnIcAM+kRBP7x5m+NpAOM= 56 | go.uber.org/zap v1.16.0/go.mod h1:MA8QOfq0BHJwdXa996Y4dYkAqRKB8/1K1QMMZVaNZjQ= 57 | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 58 | golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 59 | golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs= 60 | golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= 61 | golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= 62 | golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 63 | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= 64 | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 65 | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 66 | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 67 | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 68 | golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1 h1:ogLJMz+qpzav7lGMh10LMvAkM/fAoGlaiiHYiFYdm80= 69 | golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 70 | golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= 71 | golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 72 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 73 | golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= 74 | golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= 75 | golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= 76 | golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 77 | golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs= 78 | golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 79 | golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 80 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 81 | 
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 82 | gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= 83 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 84 | gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= 85 | gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 86 | gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 87 | gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= 88 | honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM= 89 | honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= 90 | robpike.io/nihongo v0.0.0-20200511095354-a985f0929cfa h1:kqGRKbVGwPB5mg/++wMQxOybtmW0F8uC/y+Wmjipsvc= 91 | robpike.io/nihongo v0.0.0-20200511095354-a985f0929cfa/go.mod h1:43kowN+1Rj2cR4+62PIiOFD5L9AoTNCpNNjQibm7Dq0= 92 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "embed" 5 | "fmt" 6 | "github.com/dtrunk90/switch-library-manager-web/settings" 7 | "github.com/dtrunk90/switch-library-manager-web/web" 8 | "github.com/gorilla/mux" 9 | "go.uber.org/zap" 10 | "os" 11 | "path/filepath" 12 | ) 13 | 14 | //go:embed resources/static 15 | //go:embed node_modules/bootstrap-icons/font/fonts 16 | //go:embed node_modules/flag-icons/flags 17 | //go:embed resources/layout.html 18 | //go:embed resources/partials/*.html 19 | //go:embed resources/pages/*.html 20 | var embedFS embed.FS 21 | 22 | func main() { 23 | 24 | exePath, err := os.Executable() 25 | if err != nil { 26 | fmt.Println("failed to get executable directory, please ensure app has sufficient permissions. 
aborting") 27 | return 28 | } 29 | 30 | router := mux.NewRouter() 31 | 32 | dataFolder, ok := os.LookupEnv("SLM_DATA_DIR") 33 | if !ok { 34 | dataFolder = filepath.Dir(exePath) 35 | } 36 | 37 | appSettings := settings.ReadSettings(dataFolder) 38 | 39 | logger := createLogger(appSettings.Debug) 40 | 41 | defer logger.Sync() // flushes buffer, if any 42 | sugar := logger.Sugar() 43 | 44 | destinationPath := filepath.Join(dataFolder, "img") 45 | if _, err := os.Stat(destinationPath); os.IsNotExist(err) { 46 | if err := os.Mkdir(destinationPath, os.ModePerm); err != nil { 47 | sugar.Errorf("Failed to create folder img - %v\n", err) 48 | } 49 | } 50 | 51 | sugar.Info("[SLM starts]") 52 | sugar.Infof("[Executable: %v]", exePath) 53 | sugar.Infof("[Data folder: %v]", dataFolder) 54 | 55 | web.CreateWeb(router, embedFS, appSettings, dataFolder, sugar).Start() 56 | 57 | } 58 | 59 | func createLogger(debug bool) *zap.Logger { 60 | var config zap.Config 61 | if debug { 62 | config = zap.NewDevelopmentConfig() 63 | } else { 64 | config = zap.NewDevelopmentConfig() 65 | config.Level = zap.NewAtomicLevelAt(zap.InfoLevel) 66 | } 67 | logger, err := config.Build() 68 | if err != nil { 69 | fmt.Printf("failed to create logger - %v", err) 70 | panic(1) 71 | } 72 | zap.ReplaceGlobals(logger) 73 | return logger 74 | } 75 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "devDependencies": { 3 | "gulp": "^4.0.2", 4 | "gulp-babel-minify": "^0.5.2", 5 | "gulp-clean": "^0.4.0", 6 | "gulp-concat": "^2.6.1", 7 | "gulp-if": "^3.0.0", 8 | "gulp-jshint": "^2.1.0", 9 | "gulp-purge-sourcemaps": "^1.0.0", 10 | "gulp-remove-empty-lines": "^0.1.0", 11 | "gulp-sass": "^5.1.0", 12 | "gulp-sourcemaps": "^3.0.0", 13 | "jshint-stylish": "^2.2.1", 14 | "sass": "^1.65.1" 15 | }, 16 | "dependencies": { 17 | "bootstrap": "^5.3.1", 18 | "bootstrap-icons": "^1.10.5", 19 | 
"flag-icons": "^6.9.4" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /pagination/pagination.go: -------------------------------------------------------------------------------- 1 | package pagination 2 | 3 | import "math" 4 | 5 | type Pagination struct { 6 | NumPages int 7 | HasPrev, HasNext bool 8 | PrevPage, NextPage int 9 | ItemsPerPage int 10 | CurrentPage int 11 | NumItems int 12 | Start, End int 13 | } 14 | 15 | func Calculate(currentPage, itemsPerPage, numItems int) (p Pagination) { 16 | p = Pagination{} 17 | 18 | p.CurrentPage = currentPage 19 | p.ItemsPerPage = itemsPerPage 20 | p.NumItems = numItems 21 | 22 | // calc start + end 23 | p.Start = (currentPage - 1) * p.ItemsPerPage 24 | 25 | if p.Start > p.NumItems { 26 | p.Start = p.NumItems 27 | } 28 | 29 | p.End = p.Start + p.ItemsPerPage 30 | if p.End > p.NumItems { 31 | p.End = p.NumItems 32 | } 33 | 34 | // calc number of pages 35 | d := float64(p.NumItems) / float64(p.ItemsPerPage) 36 | p.NumPages = int(math.Ceil(d)) 37 | 38 | // HasPrev, HasNext? 
39 | p.HasPrev = p.CurrentPage > 1 40 | p.HasNext = p.CurrentPage < p.NumPages 41 | 42 | // calculate prev + next pages 43 | if p.HasPrev { 44 | p.PrevPage = p.CurrentPage - 1 45 | } 46 | if p.HasNext { 47 | p.NextPage = p.CurrentPage + 1 48 | } 49 | 50 | return 51 | } 52 | -------------------------------------------------------------------------------- /process/incompleteTitleProcessor.go: -------------------------------------------------------------------------------- 1 | package process 2 | 3 | import ( 4 | "fmt" 5 | "github.com/dtrunk90/switch-library-manager-web/db" 6 | "github.com/dtrunk90/switch-library-manager-web/switchfs" 7 | "go.uber.org/zap" 8 | "sort" 9 | "strconv" 10 | ) 11 | 12 | type IncompleteTitle struct { 13 | Attributes db.TitleAttributes 14 | Meta *switchfs.ContentMetaAttributes 15 | LocalUpdate int `json:"local_update"` 16 | LatestUpdate int `json:"latest_update"` 17 | LatestUpdateDate string `json:"latest_update_date"` 18 | MissingDLC []string `json:"missing_dlc"` 19 | } 20 | 21 | func ScanForMissingUpdates(localDB map[string]*db.SwitchGameFiles, 22 | switchDB map[string]*db.SwitchTitle) map[string]IncompleteTitle { 23 | 24 | result := map[string]IncompleteTitle{} 25 | 26 | //iterate over local files, and compare to remote versions 27 | for idPrefix, switchFile := range localDB { 28 | 29 | if switchFile.BaseExist == false { 30 | zap.S().Infof("missing base for game %v", idPrefix) 31 | continue 32 | } 33 | 34 | if _, ok := switchDB[idPrefix]; !ok { 35 | continue 36 | } 37 | 38 | switchTitle := IncompleteTitle{Attributes: switchDB[idPrefix].Attributes, Meta: switchFile.File.Metadata} 39 | //sort the available local versions 40 | localVersions := make([]int, len(switchFile.Updates)) 41 | i := 0 42 | for k := range switchFile.Updates { 43 | localVersions[i] = k 44 | i++ 45 | } 46 | sort.Ints(localVersions) 47 | 48 | //sort the available remote versions 49 | remoteVersions := make([]int, len(switchDB[idPrefix].Updates)) 50 | i = 0 51 | for k := 
range switchDB[idPrefix].Updates { 52 | remoteVersions[i] = k 53 | i++ 54 | } 55 | sort.Ints(remoteVersions) 56 | switchTitle.LocalUpdate = 0 57 | switchTitle.LatestUpdate = 0 58 | if len(localVersions) != 0 { 59 | switchTitle.LocalUpdate = localVersions[len(localVersions)-1] 60 | } 61 | 62 | //process updates 63 | if len(remoteVersions) != 0 { 64 | switchTitle.LatestUpdate = remoteVersions[len(remoteVersions)-1] 65 | switchTitle.LatestUpdateDate = switchDB[idPrefix].Updates[remoteVersions[len(remoteVersions)-1]] 66 | if switchTitle.LocalUpdate < switchTitle.LatestUpdate { 67 | result[switchDB[idPrefix].Attributes.Id] = switchTitle 68 | } 69 | } 70 | 71 | if len(switchDB[idPrefix].Dlc) == 0 { 72 | continue 73 | } 74 | 75 | //process dlc 76 | for k, availableDlc := range switchDB[idPrefix].Dlc { 77 | 78 | if localDlc, ok := switchFile.Dlc[k]; ok { 79 | latestDlcVersion, err := availableDlc.Version.Int64() 80 | if err != nil { 81 | continue 82 | } 83 | 84 | if localDlc.Metadata == nil { 85 | continue 86 | } 87 | if localDlc.Metadata.Version < int(latestDlcVersion) { 88 | updateDate := "-" 89 | if availableDlc.ReleaseDate != 0 { 90 | updateDate = strconv.Itoa(availableDlc.ReleaseDate) 91 | if len(updateDate) > 7 { 92 | updateDate = updateDate[0:4] + "-" + updateDate[4:6] + "-" + updateDate[6:] 93 | } 94 | } 95 | 96 | result[availableDlc.Id] = IncompleteTitle{ 97 | Attributes: availableDlc, 98 | LatestUpdate: int(latestDlcVersion), 99 | LocalUpdate: localDlc.Metadata.Version, 100 | LatestUpdateDate: updateDate, 101 | Meta: localDlc.Metadata} 102 | } 103 | } 104 | } 105 | 106 | } 107 | return result 108 | } 109 | 110 | func ScanForMissingDLC(localDB map[string]*db.SwitchGameFiles, 111 | switchDB map[string]*db.SwitchTitle, ignoreTitleIds map[string]struct{}) map[string]IncompleteTitle { 112 | result := map[string]IncompleteTitle{} 113 | 114 | //iterate over local files, and compare to remote versions 115 | for idPrefix, switchFile := range localDB { 116 | 117 | if 
switchFile.BaseExist == false { 118 | continue 119 | } 120 | 121 | if _, ok := switchDB[idPrefix]; !ok { 122 | continue 123 | } 124 | switchTitle := IncompleteTitle{Attributes: switchDB[idPrefix].Attributes} 125 | 126 | //process dlc 127 | if len(switchDB[idPrefix].Dlc) != 0 { 128 | for k, v := range switchDB[idPrefix].Dlc { 129 | if _, ok := ignoreTitleIds[k]; ok { 130 | continue 131 | } 132 | 133 | if _, ok := switchFile.Dlc[k]; !ok { 134 | switchTitle.MissingDLC = append(switchTitle.MissingDLC, fmt.Sprintf("%v [%v]", v.Name, v.Id)) 135 | } 136 | } 137 | if len(switchTitle.MissingDLC) != 0 { 138 | result[switchDB[idPrefix].Attributes.Id] = switchTitle 139 | } 140 | } 141 | } 142 | return result 143 | } 144 | 145 | func ScanForBrokenFiles(localDB map[string]*db.SwitchGameFiles) []db.SwitchFileInfo { 146 | var result []db.SwitchFileInfo 147 | 148 | //iterate over local files, and compare to remote versions 149 | for _, switchFile := range localDB { 150 | 151 | if switchFile.BaseExist == false { 152 | for _, f := range switchFile.Dlc { 153 | result = append(result, f) 154 | } 155 | for _, f := range switchFile.Updates { 156 | result = append(result, f) 157 | } 158 | } 159 | } 160 | return result 161 | } 162 | -------------------------------------------------------------------------------- /process/organizefolderStructure.go: -------------------------------------------------------------------------------- 1 | package process 2 | 3 | import ( 4 | "github.com/dtrunk90/switch-library-manager-web/db" 5 | "github.com/dtrunk90/switch-library-manager-web/settings" 6 | "go.uber.org/zap" 7 | "io/ioutil" 8 | "os" 9 | "path" 10 | "path/filepath" 11 | "regexp" 12 | "robpike.io/nihongo" 13 | "strconv" 14 | "strings" 15 | ) 16 | 17 | var ( 18 | folderIllegalCharsRegex = regexp.MustCompile(`[/\\?%*:;=|"<>]`) 19 | nonAscii = regexp.MustCompile("[a-zA-Z0-9áéíóú@#%&',.\\s-\\[\\]\\(\\)\\+]") 20 | cjk = 
regexp.MustCompile("[\u2f70-\u2FA1\u3040-\u30ff\u3400-\u4dbf\u4e00-\u9fff\uf900-\ufaff\uff66-\uff9f\\p{Katakana}\\p{Hiragana}\\p{Hangul}]") 21 | ) 22 | 23 | func DeleteOldUpdates(dataFolder string, localDB *db.LocalSwitchFilesDB, updateProgress db.ProgressUpdater) { 24 | i := 0 25 | for k, v := range localDB.Skipped { 26 | switch v.ReasonCode { 27 | //case db.REASON_DUPLICATE: 28 | case db.REASON_OLD_UPDATE: 29 | fileToRemove := filepath.Join(k.BaseFolder, k.FileName) 30 | if updateProgress != nil { 31 | updateProgress.UpdateProgress(0, 0, "deleting "+fileToRemove) 32 | } 33 | zap.S().Infof("Deleting file: %v \n", fileToRemove) 34 | err := os.Remove(fileToRemove) 35 | if err != nil { 36 | zap.S().Errorf("Failed to delete file %v [%v]\n", fileToRemove, err) 37 | continue 38 | } 39 | i++ 40 | } 41 | 42 | } 43 | 44 | if i != 0 && settings.ReadSettings(dataFolder).OrganizeOptions.DeleteEmptyFolders { 45 | if updateProgress != nil { 46 | updateProgress.UpdateProgress(i, i+1, "deleting empty folders... (can take 1-2min)") 47 | } 48 | err := deleteEmptyFolders(dataFolder) 49 | if err != nil { 50 | zap.S().Errorf("Failed to delete empty folders [%v]\n", err) 51 | } 52 | if updateProgress != nil { 53 | updateProgress.UpdateProgress(i+1, i+1, "deleting empty folders... 
(can take 1-2min)") 54 | } 55 | } 56 | } 57 | 58 | func OrganizeByFolders(dataFolder string, 59 | localDB *db.LocalSwitchFilesDB, 60 | titlesDB *db.SwitchTitlesDB, 61 | updateProgress db.ProgressUpdater) { 62 | 63 | //validate template rules 64 | 65 | options := settings.ReadSettings(dataFolder).OrganizeOptions 66 | if !IsOptionsValid(options) { 67 | zap.S().Error("the organize options in settings.json are not valid, please check that the template contains file/folder name") 68 | return 69 | } 70 | i := 0 71 | tasksSize := len(localDB.TitlesMap) + 2 72 | for k, v := range localDB.TitlesMap { 73 | i++ 74 | if !v.BaseExist { 75 | continue 76 | } 77 | 78 | if updateProgress != nil { 79 | updateProgress.UpdateProgress(i, tasksSize, v.File.ExtendedInfo.FileName) 80 | } 81 | 82 | titleName := getTitleName(titlesDB.TitlesMap[k], v) 83 | 84 | templateData := map[string]string{} 85 | 86 | templateData[settings.TEMPLATE_TITLE_ID] = v.File.Metadata.TitleId 87 | //templateData[settings.TEMPLATE_TYPE] = "BASE" 88 | templateData[settings.TEMPLATE_TITLE_NAME] = titleName 89 | templateData[settings.TEMPLATE_VERSION_TXT] = "" 90 | if _, ok := titlesDB.TitlesMap[k]; ok { 91 | templateData[settings.TEMPLATE_REGION] = titlesDB.TitlesMap[k].Attributes.Region 92 | } 93 | templateData[settings.TEMPLATE_VERSION] = "0" 94 | 95 | if v.File.Metadata.Ncap != nil { 96 | templateData[settings.TEMPLATE_VERSION_TXT] = v.File.Metadata.Ncap.DisplayVersion 97 | } 98 | 99 | var destinationPath = v.File.ExtendedInfo.BaseFolder 100 | 101 | //create folder if needed 102 | if options.CreateFolderPerGame { 103 | folderToCreate := getFolderName(options, templateData) 104 | destinationPath = filepath.Join(dataFolder, folderToCreate) 105 | if _, err := os.Stat(destinationPath); os.IsNotExist(err) { 106 | err = os.Mkdir(destinationPath, os.ModePerm) 107 | if err != nil { 108 | zap.S().Errorf("Failed to create folder %v - %v\n", folderToCreate, err) 109 | continue 110 | } 111 | } 112 | } 113 | 114 | if 
v.IsSplit { 115 | //in case of a split file, we only rename the folder and then move all the split 116 | //files with the new folder 117 | files, err := ioutil.ReadDir(v.File.ExtendedInfo.BaseFolder) 118 | if err != nil { 119 | continue 120 | } 121 | 122 | for _, file := range files { 123 | if _, err := strconv.Atoi(file.Name()[len(file.Name())-1:]); err == nil { 124 | from := filepath.Join(v.File.ExtendedInfo.BaseFolder, file.Name()) 125 | to := filepath.Join(destinationPath, file.Name()) 126 | err := moveFile(from, to) 127 | if err != nil { 128 | zap.S().Errorf("Failed to move file [%v]\n", err) 129 | continue 130 | } 131 | } 132 | } 133 | continue 134 | 135 | } 136 | 137 | //process base title 138 | from := filepath.Join(v.File.ExtendedInfo.BaseFolder, v.File.ExtendedInfo.FileName) 139 | to := filepath.Join(destinationPath, getFileName(options, v.File.ExtendedInfo.FileName, templateData)) 140 | err := moveFile(from, to) 141 | if err != nil { 142 | zap.S().Errorf("Failed to move file [%v]\n", err) 143 | continue 144 | } 145 | 146 | //process updates 147 | for update, updateInfo := range v.Updates { 148 | if updateInfo.Metadata != nil { 149 | templateData[settings.TEMPLATE_TITLE_ID] = updateInfo.Metadata.TitleId 150 | } 151 | templateData[settings.TEMPLATE_VERSION] = strconv.Itoa(update) 152 | templateData[settings.TEMPLATE_TYPE] = "UPD" 153 | if updateInfo.Metadata.Ncap != nil { 154 | templateData[settings.TEMPLATE_VERSION_TXT] = updateInfo.Metadata.Ncap.DisplayVersion 155 | } else { 156 | templateData[settings.TEMPLATE_VERSION_TXT] = "" 157 | } 158 | 159 | from = filepath.Join(updateInfo.ExtendedInfo.BaseFolder, updateInfo.ExtendedInfo.FileName) 160 | if options.CreateFolderPerGame { 161 | to = filepath.Join(destinationPath, getFileName(options, updateInfo.ExtendedInfo.FileName, templateData)) 162 | } else { 163 | to = filepath.Join(updateInfo.ExtendedInfo.BaseFolder, getFileName(options, updateInfo.ExtendedInfo.FileName, templateData)) 164 | } 165 | err := 
moveFile(from, to) 166 | if err != nil { 167 | zap.S().Errorf("Failed to move file [%v]\n", err) 168 | continue 169 | } 170 | } 171 | 172 | //process DLC 173 | for id, dlc := range v.Dlc { 174 | if dlc.Metadata != nil { 175 | templateData[settings.TEMPLATE_VERSION] = strconv.Itoa(dlc.Metadata.Version) 176 | } 177 | templateData[settings.TEMPLATE_TYPE] = "DLC" 178 | templateData[settings.TEMPLATE_TITLE_ID] = id 179 | templateData[settings.TEMPLATE_DLC_NAME] = getDlcName(titlesDB.TitlesMap[k], dlc) 180 | from = filepath.Join(dlc.ExtendedInfo.BaseFolder, dlc.ExtendedInfo.FileName) 181 | if options.CreateFolderPerGame { 182 | to = filepath.Join(destinationPath, getFileName(options, dlc.ExtendedInfo.FileName, templateData)) 183 | } else { 184 | to = filepath.Join(dlc.ExtendedInfo.BaseFolder, getFileName(options, dlc.ExtendedInfo.FileName, templateData)) 185 | } 186 | err = moveFile(from, to) 187 | if err != nil { 188 | zap.S().Errorf("Failed to move file [%v]\n", err) 189 | continue 190 | } 191 | } 192 | } 193 | 194 | if options.DeleteEmptyFolders { 195 | if updateProgress != nil { 196 | i += 1 197 | updateProgress.UpdateProgress(i, tasksSize, "deleting empty folders... 
(can take 1-2min)") 198 | } 199 | err := deleteEmptyFolders(dataFolder) 200 | if err != nil { 201 | zap.S().Errorf("Failed to delete empty folders [%v]\n", err) 202 | } 203 | if updateProgress != nil { 204 | i += 1 205 | updateProgress.UpdateProgress(i, tasksSize, "done") 206 | } 207 | } else { 208 | if updateProgress != nil { 209 | i += 2 210 | updateProgress.UpdateProgress(i, tasksSize, "done") 211 | } 212 | } 213 | } 214 | 215 | func IsOptionsValid(options settings.OrganizeOptions) bool { 216 | if options.RenameFiles { 217 | if options.FileNameTemplate == "" { 218 | zap.S().Error("file name template cannot be empty") 219 | return false 220 | } 221 | if !strings.Contains(options.FileNameTemplate, settings.TEMPLATE_TITLE_NAME) && 222 | !strings.Contains(options.FileNameTemplate, settings.TEMPLATE_TITLE_ID) { 223 | zap.S().Error("file name template needs to contain one of the following - titleId or title name") 224 | return false 225 | } 226 | 227 | } 228 | 229 | if options.CreateFolderPerGame { 230 | if options.FolderNameTemplate == "" { 231 | zap.S().Error("folder name template cannot be empty") 232 | return false 233 | } 234 | if !strings.Contains(options.FolderNameTemplate, settings.TEMPLATE_TITLE_NAME) && 235 | !strings.Contains(options.FolderNameTemplate, settings.TEMPLATE_TITLE_ID) { 236 | zap.S().Error("folder name template needs to contain one of the following - titleId or title name") 237 | return false 238 | } 239 | } 240 | return true 241 | } 242 | 243 | func getDlcName(switchTitle *db.SwitchTitle, file db.SwitchFileInfo) string { 244 | if switchTitle == nil { 245 | return "" 246 | } 247 | if dlcAttributes, ok := switchTitle.Dlc[file.Metadata.TitleId]; ok { 248 | name := dlcAttributes.Name 249 | name = strings.ReplaceAll(name, "\n", " ") 250 | return name 251 | } 252 | return "" 253 | } 254 | 255 | func getTitleName(switchTitle *db.SwitchTitle, v *db.SwitchGameFiles) string { 256 | if switchTitle != nil && switchTitle.Attributes.Name != "" { 257 | res 
:= cjk.FindAllString(switchTitle.Attributes.Name, -1) 258 | if len(res) == 0 { 259 | return switchTitle.Attributes.Name 260 | } 261 | } 262 | 263 | if v.File.Metadata.Ncap != nil { 264 | name := v.File.Metadata.Ncap.TitleName["AmericanEnglish"].Title 265 | if name != "" { 266 | return name 267 | } 268 | } 269 | //for non eshop games (cartridge only), grab the name from the file 270 | return db.ParseTitleNameFromFileName(v.File.ExtendedInfo.FileName) 271 | 272 | } 273 | 274 | func getFolderName(options settings.OrganizeOptions, templateData map[string]string) string { 275 | 276 | return applyTemplate(templateData, options.SwitchSafeFileNames, options.FolderNameTemplate) 277 | } 278 | 279 | func getFileName(options settings.OrganizeOptions, originalName string, templateData map[string]string) string { 280 | if !options.RenameFiles { 281 | return originalName 282 | } 283 | ext := path.Ext(originalName) 284 | result := applyTemplate(templateData, options.SwitchSafeFileNames, options.FileNameTemplate) 285 | return result + ext 286 | } 287 | 288 | func moveFile(from string, to string) error { 289 | if from == to { 290 | return nil 291 | } 292 | err := os.Rename(from, to) 293 | return err 294 | } 295 | 296 | func applyTemplate(templateData map[string]string, useSafeNames bool, template string) string { 297 | result := strings.Replace(template, "{"+settings.TEMPLATE_TITLE_NAME+"}", templateData[settings.TEMPLATE_TITLE_NAME], 1) 298 | result = strings.Replace(result, "{"+settings.TEMPLATE_TITLE_ID+"}", strings.ToUpper(templateData[settings.TEMPLATE_TITLE_ID]), 1) 299 | result = strings.Replace(result, "{"+settings.TEMPLATE_VERSION+"}", templateData[settings.TEMPLATE_VERSION], 1) 300 | result = strings.Replace(result, "{"+settings.TEMPLATE_TYPE+"}", templateData[settings.TEMPLATE_TYPE], 1) 301 | result = strings.Replace(result, "{"+settings.TEMPLATE_VERSION_TXT+"}", templateData[settings.TEMPLATE_VERSION_TXT], 1) 302 | result = strings.Replace(result, 
"{"+settings.TEMPLATE_REGION+"}", templateData[settings.TEMPLATE_REGION], 1) 303 | //remove title name from dlc name 304 | dlcName := strings.Replace(templateData[settings.TEMPLATE_DLC_NAME], templateData[settings.TEMPLATE_TITLE_NAME], "", 1) 305 | dlcName = strings.TrimSpace(dlcName) 306 | dlcName = strings.TrimPrefix(dlcName, "-") 307 | dlcName = strings.TrimSpace(dlcName) 308 | result = strings.Replace(result, "{"+settings.TEMPLATE_DLC_NAME+"}", dlcName, 1) 309 | result = strings.ReplaceAll(result, "[]", "") 310 | result = strings.ReplaceAll(result, "()", "") 311 | result = strings.ReplaceAll(result, "<>", "") 312 | if strings.HasSuffix(result, ".") { 313 | result = result[:len(result)-1] 314 | } 315 | 316 | if useSafeNames { 317 | result = nihongo.RomajiString(result) 318 | safe := nonAscii.FindAllString(result, -1) 319 | result = strings.Join(safe, "") 320 | } 321 | result = strings.ReplaceAll(result, " ", " ") 322 | result = strings.TrimSpace(result) 323 | return folderIllegalCharsRegex.ReplaceAllString(result, "") 324 | } 325 | 326 | func deleteEmptyFolders(path string) error { 327 | err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error { 328 | if err != nil { 329 | zap.S().Error("Error while deleting empty folders", err) 330 | } 331 | if info != nil && info.IsDir() { 332 | err = deleteEmptyFolder(path) 333 | if err != nil { 334 | zap.S().Error("Error while deleting empty folders", err) 335 | } 336 | } 337 | 338 | return nil 339 | }) 340 | return err 341 | } 342 | 343 | func deleteEmptyFolder(path string) error { 344 | files, err := ioutil.ReadDir(path) 345 | if err != nil { 346 | return err 347 | } 348 | 349 | if len(files) != 0 { 350 | return nil 351 | } 352 | 353 | zap.S().Infof("\nDeleting empty folder [%v]", path) 354 | _ = os.Remove(path) 355 | 356 | return nil 357 | } 358 | -------------------------------------------------------------------------------- /process/organizefolderStructure_test.go: 
-------------------------------------------------------------------------------- 1 | package process 2 | 3 | import ( 4 | "robpike.io/nihongo" 5 | "strings" 6 | "testing" 7 | ) 8 | 9 | //var folderIllegalCharsRegex = regexp.MustCompile(`[./\\?%*:;=|"<>]`) 10 | 11 | func TestRename(t *testing.T) { 12 | name := "Pokémon™: Let’s Go, Eevee! 포탈 나이츠" 13 | name = folderIllegalCharsRegex.ReplaceAllString(name, "") 14 | safe := cjk.FindAllString(name, -1) 15 | name = strings.Join(safe, "") 16 | name = nihongo.RomajiString(name) 17 | } 18 | -------------------------------------------------------------------------------- /resources/layout.html: -------------------------------------------------------------------------------- 1 | {{- define "layout" -}} 2 | 3 | 4 | 5 | 6 | 7 | {{template "title" .}} - Switch Library Manager Web 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 |
21 | 76 |
77 |
78 |
79 | {{- if not .IsKeysFileAvailable}} 80 | 85 | {{- end}} 86 | {{- template "main" .}} 87 |
88 |
89 | 94 | 95 | 96 | 97 | {{end}} 98 | -------------------------------------------------------------------------------- /resources/pages/dlc.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}DLC{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

DLC {{.Pagination.NumItems}}

6 | {{- template "filter" .}} 7 |
8 |
9 | {{- if gt (len .TitleItems) 0}} 10 |
11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | {{- range .TitleItems}} 21 | 22 | 38 | 39 | 46 | 47 | {{- end}} 48 | 49 |
Title# MissingMissing DLC
23 |
24 |
25 | {{- if .Region}} 26 |
27 | {{.Region}} 28 |
29 | {{- end}} 30 | {{.Name}} 31 |
32 |
33 | {{.Name}}
34 | ID: {{.Id}} 35 |
36 |
37 |
{{len .MissingDLC}} 40 |
    41 | {{- range .MissingDLC}} 42 |
  • {{.}}
  • 43 | {{- end}} 44 |
45 |
50 |
51 | {{- template "pagination" .}} 52 | {{- else}} 53 | 57 | {{- end}} 58 | {{- end}} 59 | -------------------------------------------------------------------------------- /resources/pages/index.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}Library{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

Library {{.Pagination.NumItems}}

6 | {{- template "filter" .}} 7 |
8 |
9 | {{- if gt (len .TitleItems) 0}} 10 |
11 | {{- range .TitleItems}} 12 | {{- template "card" .}} 13 | {{- end}} 14 |
15 | {{- template "pagination" .}} 16 | {{- else}} 17 | 21 | {{- end}} 22 | {{- end}} 23 | -------------------------------------------------------------------------------- /resources/pages/issues.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}Issues{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

Issues {{len .Issues}}

6 |
7 |
8 | {{- if gt (len .Issues) 0}} 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | {{- range .Issues}} 18 | 19 | 20 | 21 | 22 | {{- end}} 23 | 24 |
KeyValue
{{.File}}{{.Reason}}
25 | {{- else}} 26 | 30 | {{- end}} 31 | {{- end}} 32 | -------------------------------------------------------------------------------- /resources/pages/missing.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}Missing Games{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

Missing Games {{.Pagination.NumItems}}

6 | {{- template "filter" .}} 7 |
8 |
9 | {{- if gt (len .TitleItems) 0}} 10 |
11 | {{- range .TitleItems}} 12 | {{- template "card" .}} 13 | {{- end}} 14 |
15 | {{- template "pagination" .}} 16 | {{- else}} 17 | 21 | {{- end}} 22 | {{- end}} 23 | -------------------------------------------------------------------------------- /resources/pages/settings.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}Settings{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

Settings

6 |
7 |
8 |
9 |
10 | 11 | 12 |
13 |
14 | 15 | 21 |
One folder per Line
22 |
23 |
24 | 25 | 30 |
One Title ID per Line
31 |
32 |
33 | 34 |
35 |
36 | {{- end}} 37 | -------------------------------------------------------------------------------- /resources/pages/updates.html: -------------------------------------------------------------------------------- 1 | {{define "title"}}Updates{{end}} 2 | {{define "main"}} 3 |
4 |
5 |

Updates {{.Pagination.NumItems}}

6 | {{- template "filter" .}} 7 |
8 |
9 | {{- if gt (len .TitleItems) 0}} 10 |
11 | {{- range .TitleItems}} 12 | {{- template "card" .}} 13 | {{- end}} 14 |
15 | {{- template "pagination" .}} 16 | {{- else}} 17 | 21 | {{- end}} 22 | {{- end}} 23 | -------------------------------------------------------------------------------- /resources/partials/card.html: -------------------------------------------------------------------------------- 1 | {{- define "card"}} 2 |
3 |
4 |
5 | {{- if or .Type .Region}} 6 |
7 | {{- if .Type}} 8 | {{.Type}} 9 | {{- end}} 10 | {{- if .Region}} 11 | {{.Region}} 12 | {{- end}} 13 |
14 | {{- end}} 15 | {{.Name}} 16 |
17 |
18 |
{{.Name}}
19 | {{- if .LatestUpdate}} 20 |

21 | Update: {{.LocalUpdate}} → {{.LatestUpdate}} 22 |

23 | {{- else if .LocalUpdate}} 24 |

25 | Version: {{if .Version}}{{.Version}}{{else}}Unknown{{end}} ({{.LocalUpdate}}) 26 |

27 | {{- end}} 28 | {{- if or (and .ReleaseDate (not .ReleaseDate.IsZero)) (and .LatestUpdateDate (not .LatestUpdateDate.IsZero))}} 29 |

30 | {{- if and .ReleaseDate (not .ReleaseDate.IsZero)}} 31 |

Release Date: {{.ReleaseDate | formatTime}}
32 | {{- end}} 33 | {{- if and .LatestUpdateDate (not .LatestUpdateDate.IsZero)}} 34 |
Latest Update Date: {{.LatestUpdateDate | formatTime}}
35 | {{- end}} 36 |

37 | {{- end}} 38 |

ID: {{.Id}}

39 |
40 |
41 |
42 | {{- end}} 43 | -------------------------------------------------------------------------------- /resources/partials/filter.html: -------------------------------------------------------------------------------- 1 | {{- define "filter"}} 2 |
3 |
4 |
5 | 6 | 7 |
8 | 9 | 10 | 11 |
12 |
13 | 18 |
19 | 36 |
37 | 38 | 39 | 40 | 41 |
42 |
43 | 44 |
45 |
46 | {{- end}} 47 | -------------------------------------------------------------------------------- /resources/partials/pagination.html: -------------------------------------------------------------------------------- 1 | {{- define "pagination"}} 2 |
3 |
4 | 47 |
48 |
49 | {{- end}} 50 | -------------------------------------------------------------------------------- /resources/static/android-chrome-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/android-chrome-192x192.png -------------------------------------------------------------------------------- /resources/static/android-chrome-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/android-chrome-512x512.png -------------------------------------------------------------------------------- /resources/static/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/apple-touch-icon.png -------------------------------------------------------------------------------- /resources/static/browserconfig.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | #0a0a0a 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /resources/static/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/favicon-16x16.png -------------------------------------------------------------------------------- /resources/static/favicon-32x32.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/favicon-32x32.png -------------------------------------------------------------------------------- /resources/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/favicon.ico -------------------------------------------------------------------------------- /resources/static/icon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 35 | 37 | 41 | 45 | 49 | 53 | 57 | 61 | 65 | 70 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /resources/static/mstile-144x144.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/mstile-144x144.png -------------------------------------------------------------------------------- /resources/static/mstile-150x150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/mstile-150x150.png -------------------------------------------------------------------------------- /resources/static/mstile-310x150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/mstile-310x150.png -------------------------------------------------------------------------------- /resources/static/mstile-310x310.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/mstile-310x310.png -------------------------------------------------------------------------------- /resources/static/mstile-70x70.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/mstile-70x70.png -------------------------------------------------------------------------------- /resources/static/noimage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dtrunk90/switch-library-manager-web/eba86f6d84926f090c9961eeabbddc4eff18f9b6/resources/static/noimage.png -------------------------------------------------------------------------------- /resources/static/safari-pinned-tab.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 7 | 8 | Created by potrace 1.14, written by Peter Selinger 2001-2017 9 | 10 | 12 | 15 | 18 | 29 | 31 | 34 | 36 | 38 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /resources/static/site.webmanifest: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Switch Library Manager Web", 3 | "short_name": "Switch Library Manager Web", 4 | "icons": [ 5 | { 6 | "src": "/resources/static/android-chrome-192x192.png", 7 | "sizes": "192x192", 8 | "type": "image/png" 9 | }, 10 | { 11 | "src": "/resources/static/android-chrome-512x512.png", 12 | "sizes": "512x512", 13 | "type": "image/png" 14 | } 15 | ], 16 | "theme_color": "#0a0a0a", 17 | "background_color": "#0a0a0a", 18 | "display": "standalone" 19 | } 20 | 
-------------------------------------------------------------------------------- /resources/web.js: -------------------------------------------------------------------------------- 1 | /*jshint esversion: 9 */ 2 | /*globals bootstrap */ 3 | 4 | function insertAlert(element, contextualClass, iconClass, strongMessage, message, dismissible = true, id = "") { 5 | const alert = document.createElement('div'); 6 | alert.classList.add('alert', contextualClass, 'd-flex', 'align-items-center', 'fade', 'show'); 7 | if (dismissible) { 8 | alert.classList.add('alert-dismissible'); 9 | } 10 | alert.setAttribute('role', 'alert'); 11 | if (id != "") { 12 | alert.id = id; 13 | } 14 | 15 | const icon = document.createElement('span'); 16 | icon.classList.add('bi', iconClass, 'flex-shrink-0', 'me-2'); 17 | alert.appendChild(icon); 18 | 19 | const fullMessageWrapper = document.createElement('div'); 20 | 21 | if (strongMessage) { 22 | const strongMessageWrapper = document.createElement('strong'); 23 | strongMessageWrapper.appendChild(document.createTextNode(strongMessage)); 24 | fullMessageWrapper.appendChild(strongMessageWrapper); 25 | fullMessageWrapper.appendChild(document.createTextNode(' ')); 26 | } 27 | 28 | fullMessageWrapper.appendChild(document.createTextNode(message)); 29 | alert.appendChild(fullMessageWrapper); 30 | 31 | if (dismissible) { 32 | const closeBtn = document.createElement('button'); 33 | closeBtn.setAttribute('aria-label', 'Close'); 34 | closeBtn.setAttribute('type', 'button'); 35 | closeBtn.classList.add('btn-close'); 36 | closeBtn.dataset.bsDismiss = 'alert'; 37 | alert.appendChild(closeBtn); 38 | } 39 | 40 | element.insertBefore(alert, element.firstChild); 41 | } 42 | 43 | function onSubmit(form) { 44 | const feedbackAlert = form.querySelector('.alert'); 45 | if (feedbackAlert) { 46 | form.removeChild(feedbackAlert); 47 | } 48 | 49 | form.querySelectorAll('.is-invalid').forEach(e => e.classList.remove('is-invalid')); 50 | 
form.querySelectorAll('.invalid-feedback').forEach(e => e.parentNode.removeChild(e)); 51 | 52 | const data = new FormData(form); 53 | fetch(form.action || window.location.href, { 54 | method: 'POST', 55 | body: new URLSearchParams(data).toString(), 56 | headers: { 57 | 'Content-type': 'application/x-www-form-urlencoded' 58 | } 59 | }).then(response => { 60 | if (!response.ok) { 61 | throw response; 62 | } 63 | 64 | response.json().then(jsonResponse => { 65 | insertAlert(form, 'alert-success', 'bi-check-circle-fill', jsonResponse.strongMessage, jsonResponse.message); 66 | }); 67 | }).catch(error => { 68 | error.json().then(jsonResponse => { 69 | if (jsonResponse.globalError.strongMessage || jsonResponse.globalError.message) { 70 | insertAlert(form, 'alert-danger', 'bi-exclamation-triangle-fill', jsonResponse.globalError.strongMessage, jsonResponse.globalError.message); 71 | } else if (jsonResponse.fieldErrors) { 72 | jsonResponse.fieldErrors.forEach(fieldError => { 73 | const validationFeedback = document.createElement('div'); 74 | validationFeedback.id = `validation-feedback-${fieldError.field}`; 75 | validationFeedback.classList.add('invalid-feedback'); 76 | validationFeedback.appendChild(document.createTextNode(fieldError.message)); 77 | 78 | const field = form.querySelector(`[name="${fieldError.field}"]`); 79 | field.setAttribute('aria-describedby', `validation-feedback-${fieldError.field}`); 80 | field.classList.add('is-invalid'); 81 | 82 | field.parentElement.appendChild(validationFeedback); 83 | }); 84 | } 85 | }); 86 | }); 87 | } 88 | 89 | function checkSyncStatus() { 90 | let checkAgain = true; 91 | 92 | fetch('/sync', { method: 'GET' }).then(response => response.json().then(isSynchronizing => { 93 | if (!isSynchronizing) { 94 | document.getElementById('alert_sync').remove(); 95 | checkAgain = false; 96 | } 97 | })); 98 | 99 | if (checkAgain) { 100 | setTimeout(checkSyncStatus, 5000); 101 | } 102 | } 103 | 104 | document.addEventListener('DOMContentLoaded', 
() => { 105 | const tooltipTriggerList = document.querySelectorAll('[data-bs-toggle="tooltip"]'); 106 | [...tooltipTriggerList].map(tooltipTriggerEl => new bootstrap.Tooltip(tooltipTriggerEl)); 107 | 108 | const sync = document.getElementById('sync'); 109 | sync.addEventListener('click', e => { 110 | e.preventDefault(); 111 | fetch(sync.href, { method: 'POST' }); 112 | insertAlert(document.querySelector('main > .container-fluid'), 'alert-info', 'bi-info-circle-fill', "Synchronizing!", "Titles are getting synchronized.", false, "alert_sync"); 113 | checkSyncStatus(); 114 | }); 115 | 116 | const settingsForm = document.getElementById('settingsForm'); 117 | if (settingsForm) { 118 | settingsForm.addEventListener('submit', e => { 119 | e.preventDefault(); 120 | onSubmit(settingsForm); 121 | }); 122 | } 123 | }, false); 124 | -------------------------------------------------------------------------------- /resources/web.scss: -------------------------------------------------------------------------------- 1 | /* 2 | * Bootstrap 3 | */ 4 | @import "../node_modules/bootstrap/scss/mixins/banner"; 5 | @include bsBanner(""); 6 | 7 | 8 | // scss-docs-start import-stack 9 | // Configuration 10 | @import "../node_modules/bootstrap/scss/functions"; 11 | @import "../node_modules/bootstrap/scss/variables"; 12 | @import "../node_modules/bootstrap/scss/variables-dark"; 13 | @import "../node_modules/bootstrap/scss/maps"; 14 | @import "../node_modules/bootstrap/scss/mixins"; 15 | @import "../node_modules/bootstrap/scss/utilities"; 16 | 17 | // Default variable overrides 18 | $pagination-color: $white; 19 | $pagination-focus-color: $white; 20 | $pagination-hover-color: $white; 21 | $pagination-active-bg: $secondary; 22 | $pagination-active-border-color: $pagination-active-bg; 23 | 24 | $tooltip-color: $dark; 25 | $tooltip-bg: $light; 26 | 27 | // Layout & components 28 | @import "../node_modules/bootstrap/scss/root"; 29 | @import "../node_modules/bootstrap/scss/reboot"; 30 | @import 
"../node_modules/bootstrap/scss/type"; 31 | @import "../node_modules/bootstrap/scss/images"; 32 | @import "../node_modules/bootstrap/scss/containers"; 33 | @import "../node_modules/bootstrap/scss/grid"; 34 | @import "../node_modules/bootstrap/scss/tables"; 35 | @import "../node_modules/bootstrap/scss/forms"; 36 | @import "../node_modules/bootstrap/scss/buttons"; 37 | @import "../node_modules/bootstrap/scss/transitions"; 38 | @import "../node_modules/bootstrap/scss/dropdown"; 39 | @import "../node_modules/bootstrap/scss/button-group"; 40 | @import "../node_modules/bootstrap/scss/nav"; 41 | @import "../node_modules/bootstrap/scss/navbar"; 42 | @import "../node_modules/bootstrap/scss/card"; 43 | @import "../node_modules/bootstrap/scss/accordion"; 44 | @import "../node_modules/bootstrap/scss/breadcrumb"; 45 | @import "../node_modules/bootstrap/scss/pagination"; 46 | @import "../node_modules/bootstrap/scss/badge"; 47 | @import "../node_modules/bootstrap/scss/alert"; 48 | @import "../node_modules/bootstrap/scss/progress"; 49 | @import "../node_modules/bootstrap/scss/list-group"; 50 | @import "../node_modules/bootstrap/scss/close"; 51 | @import "../node_modules/bootstrap/scss/toasts"; 52 | @import "../node_modules/bootstrap/scss/modal"; 53 | @import "../node_modules/bootstrap/scss/tooltip"; 54 | @import "../node_modules/bootstrap/scss/popover"; 55 | @import "../node_modules/bootstrap/scss/carousel"; 56 | @import "../node_modules/bootstrap/scss/spinners"; 57 | @import "../node_modules/bootstrap/scss/offcanvas"; 58 | @import "../node_modules/bootstrap/scss/placeholders"; 59 | 60 | // Helpers 61 | @import "../node_modules/bootstrap/scss/helpers"; 62 | 63 | // Utilities 64 | @import "../node_modules/bootstrap/scss/utilities/api"; 65 | // scss-docs-end import-stack 66 | 67 | /* 68 | * Bootstrap Icons 69 | */ 70 | $bootstrap-icons-font-dir: "/resources/vendor/bootstrap-icons/font/fonts"; 71 | @import "../node_modules/bootstrap-icons/font/bootstrap-icons"; 72 | 73 | /* 74 | * 
Flag Icons 75 | */ 76 | $flag-icons-path: "/resources/vendor/flag-icons/flags"; 77 | @import "../node_modules/flag-icons/sass/flag-icons"; 78 | 79 | /* 80 | * Other 81 | */ 82 | .card-img-top { 83 | object-fit: cover; 84 | height: 94vw; 85 | width: 100%; 86 | } 87 | 88 | .td-img { 89 | object-fit: cover; 90 | height: 26vw; 91 | width: auto; 92 | } 93 | 94 | @include media-breakpoint-up(sm) { 95 | .card-img-top { 96 | height: 45vw; 97 | } 98 | 99 | .td-img { 100 | height: 18vw; 101 | } 102 | } 103 | 104 | @include media-breakpoint-up(md) { 105 | .card-img-top { 106 | height: 30vw; 107 | } 108 | 109 | .td-img { 110 | height: 15vw; 111 | } 112 | } 113 | 114 | @include media-breakpoint-up(lg) { 115 | .card-img-top { 116 | height: 22vw; 117 | } 118 | } 119 | 120 | @include media-breakpoint-up(xl) { 121 | .card-img-top { 122 | height: 18vw; 123 | } 124 | } 125 | 126 | @include media-breakpoint-up(xxl) { 127 | .card-img-top { 128 | height: 15vw; 129 | } 130 | } 131 | 132 | .bs-tooltip-auto { 133 | &[data-popper-placement^="top"] .tooltip-arrow::before { 134 | border-top-color: $black; 135 | } 136 | 137 | &[data-popper-placement^="right"] .tooltip-arrow::before { 138 | border-right-color: $black; 139 | } 140 | 141 | &[data-popper-placement^="bottom"] .tooltip-arrow::before { 142 | border-bottom-color: $black; 143 | } 144 | 145 | &[data-popper-placement^="left"] .tooltip-arrow::before { 146 | border-left-color: $black; 147 | } 148 | } 149 | 150 | .tooltip-inner { 151 | background-color: $black; 152 | color: $white; 153 | } 154 | -------------------------------------------------------------------------------- /settings/keys.go: -------------------------------------------------------------------------------- 1 | package settings 2 | 3 | import ( 4 | "errors" 5 | "github.com/magiconair/properties" 6 | "path/filepath" 7 | ) 8 | 9 | var ( 10 | keysInstance *switchKeys 11 | ) 12 | 13 | type switchKeys struct { 14 | keys map[string]string 15 | } 16 | 17 | func (k *switchKeys) 
GetKey(keyName string) string { 18 | return k.keys[keyName] 19 | } 20 | 21 | func SwitchKeys() (*switchKeys, error) { 22 | return keysInstance, nil 23 | } 24 | 25 | func InitSwitchKeys(dataFolder string) (*switchKeys, error) { 26 | settings := ReadSettings(dataFolder) 27 | path := settings.Prodkeys 28 | keys, err := GetSwitchKeys(path) 29 | 30 | if err != nil { 31 | return nil, errors.New("Error trying to read prod.keys [reason:" + err.Error() + "]") 32 | } 33 | 34 | settings.Prodkeys = path 35 | SaveSettings(settings, dataFolder) 36 | keysInstance = &switchKeys{keys: keys} 37 | 38 | return keysInstance, nil 39 | } 40 | 41 | func GetSwitchKeys(path string) (map[string]string, error) { 42 | keys := map[string]string{} 43 | 44 | p, err := properties.LoadFile(filepath.Join(path, "prod.keys"), properties.UTF8) 45 | 46 | if err != nil { 47 | return keys, err 48 | } 49 | 50 | for _, key := range p.Keys() { 51 | value, _ := p.Get(key) 52 | keys[key] = value 53 | } 54 | 55 | return keys, nil 56 | } 57 | 58 | func IsKeysFileAvailable() bool { 59 | if keys, _ := SwitchKeys(); keys != nil && keys.GetKey("header_key") != "" { 60 | return true 61 | } 62 | 63 | return false 64 | } 65 | -------------------------------------------------------------------------------- /settings/settings.go: -------------------------------------------------------------------------------- 1 | package settings 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "go.uber.org/zap" 7 | "io/ioutil" 8 | "os" 9 | "path/filepath" 10 | ) 11 | 12 | var ( 13 | settingsInstance *AppSettings 14 | ) 15 | 16 | const ( 17 | SETTINGS_FILENAME = "settings.json" 18 | TITLE_JSON_FILENAME = "titles.json" 19 | VERSIONS_JSON_FILENAME = "versions.json" 20 | SLM_VERSION = "1.4.0" 21 | SLM_WEB_VERSION = "1.0.12" 22 | TITLES_JSON_URL = "https://tinfoil.io/repo/db/titles.json" 23 | //VERSIONS_JSON_URL = "https://tinfoil.io/repo/db/versions.json" 24 | VERSIONS_JSON_URL = 
"https://raw.githubusercontent.com/blawar/titledb/master/versions.json" 25 | ) 26 | 27 | const ( 28 | TEMPLATE_TITLE_ID = "TITLE_ID" 29 | TEMPLATE_TITLE_NAME = "TITLE_NAME" 30 | TEMPLATE_DLC_NAME = "DLC_NAME" 31 | TEMPLATE_VERSION = "VERSION" 32 | TEMPLATE_REGION = "REGION" 33 | TEMPLATE_VERSION_TXT = "VERSION_TXT" 34 | TEMPLATE_TYPE = "TYPE" 35 | ) 36 | 37 | type OrganizeOptions struct { 38 | CreateFolderPerGame bool `json:"create_folder_per_game"` 39 | RenameFiles bool `json:"rename_files"` 40 | DeleteEmptyFolders bool `json:"delete_empty_folders"` 41 | DeleteOldUpdateFiles bool `json:"delete_old_update_files"` 42 | FolderNameTemplate string `json:"folder_name_template"` 43 | SwitchSafeFileNames bool `json:"switch_safe_file_names"` 44 | FileNameTemplate string `json:"file_name_template"` 45 | } 46 | 47 | type AppSettings struct { 48 | VersionsEtag string `json:"versions_etag"` 49 | TitlesEtag string `json:"titles_etag"` 50 | Prodkeys string `json:"prod_keys"` 51 | Folder string `json:"folder"` 52 | ScanFolders []string `json:"scan_folders"` 53 | Port int `json:"port"` 54 | Debug bool `json:"debug"` 55 | OrganizeOptions OrganizeOptions `json:"organize_options"` 56 | IgnoreDLCTitleIds []string `json:"ignore_dlc_title_ids"` 57 | } 58 | 59 | func ReadSettingsAsJSON(dataFolder string) string { 60 | if _, err := os.Stat(filepath.Join(dataFolder, SETTINGS_FILENAME)); err != nil { 61 | saveDefaultSettings(dataFolder) 62 | } 63 | file, _ := os.Open(filepath.Join(dataFolder, SETTINGS_FILENAME)) 64 | bytes, _ := ioutil.ReadAll(file) 65 | return string(bytes) 66 | } 67 | 68 | func ReadSettings(dataFolder string) *AppSettings { 69 | if settingsInstance != nil { 70 | return settingsInstance 71 | } 72 | settingsInstance = &AppSettings{Debug: false, ScanFolders: []string{}, 73 | OrganizeOptions: OrganizeOptions{SwitchSafeFileNames: true}, Prodkeys: "", IgnoreDLCTitleIds: []string{"01007F600B135007"}} 74 | if _, err := os.Stat(filepath.Join(dataFolder, SETTINGS_FILENAME)); err == 
nil { 75 | file, err := os.Open(filepath.Join(dataFolder, SETTINGS_FILENAME)) 76 | if err != nil { 77 | zap.S().Warnf("Missing or corrupted config file, creating a new one") 78 | return saveDefaultSettings(dataFolder) 79 | } else { 80 | _ = json.NewDecoder(file).Decode(&settingsInstance) 81 | return settingsInstance 82 | } 83 | } else { 84 | return saveDefaultSettings(dataFolder) 85 | } 86 | } 87 | 88 | func saveDefaultSettings(dataFolder string) *AppSettings { 89 | settingsInstance = &AppSettings{ 90 | TitlesEtag: "W/\"a5b02845cf6bd61:0\"", 91 | VersionsEtag: "W/\"2ef50d1cb6bd61:0\"", 92 | Prodkeys: dataFolder, 93 | Folder: "/mnt/roms", 94 | ScanFolders: []string{}, 95 | IgnoreDLCTitleIds: []string{}, 96 | Port: 3000, 97 | Debug: false, 98 | OrganizeOptions: OrganizeOptions{ 99 | RenameFiles: false, 100 | CreateFolderPerGame: false, 101 | FolderNameTemplate: fmt.Sprintf("{%v}", TEMPLATE_TITLE_NAME), 102 | FileNameTemplate: fmt.Sprintf("{%v} ({%v})[{%v}][v{%v}]", TEMPLATE_TITLE_NAME, TEMPLATE_DLC_NAME, 103 | TEMPLATE_TITLE_ID, TEMPLATE_VERSION), 104 | DeleteEmptyFolders: false, 105 | SwitchSafeFileNames: true, 106 | DeleteOldUpdateFiles: false, 107 | }, 108 | } 109 | return SaveSettings(settingsInstance, dataFolder) 110 | } 111 | 112 | func SaveSettings(settings *AppSettings, dataFolder string) *AppSettings { 113 | file, _ := json.MarshalIndent(settings, "", " ") 114 | _ = ioutil.WriteFile(filepath.Join(dataFolder, SETTINGS_FILENAME), file, 0644) 115 | settingsInstance = settings 116 | return settings 117 | } 118 | -------------------------------------------------------------------------------- /switchfs/_crypto/ecb.go: -------------------------------------------------------------------------------- 1 | package _crypto 2 | 3 | import "crypto/aes" 4 | 5 | func DecryptAes128Ecb(data, key []byte) []byte { 6 | 7 | cipher, _ := aes.NewCipher([]byte(key)) 8 | decrypted := make([]byte, len(data)) 9 | size := 16 10 | 11 | for bs, be := 0, size; bs < len(data); bs, be = 
bs+size, be+size { 12 | cipher.Decrypt(decrypted[bs:be], data[bs:be]) 13 | } 14 | 15 | return decrypted 16 | } 17 | -------------------------------------------------------------------------------- /switchfs/_crypto/xts.go: -------------------------------------------------------------------------------- 1 | /// Modified xts to support custom tweak 2 | 3 | // Copyright 2012 The Go Authors. All rights reserved. 4 | // Use of this source code is governed by a BSD-style 5 | // license that can be found in the LICENSE file. 6 | 7 | // Package xts implements the XTS cipher mode as specified in IEEE P1619/D16. 8 | // 9 | // XTS mode is typically used for disk encryption, which presents a number of 10 | // novel problems that make more common modes inapplicable. The disk is 11 | // conceptually an array of sectors and we must be able to encrypt and decrypt 12 | // a sector in isolation. However, an attacker must not be able to transpose 13 | // two sectors of plaintext by transposing their ciphertext. 14 | // 15 | // XTS wraps a block cipher with Rogaway's XEX mode in order to build a 16 | // tweakable block cipher. This allows each sector to have a unique tweak and 17 | // effectively create a unique key for each sector. 18 | // 19 | // XTS does not provide any authentication. An attacker can manipulate the 20 | // ciphertext and randomise a block (16 bytes) of the plaintext. This package 21 | // does not implement ciphertext-stealing so sectors must be a multiple of 16 22 | // bytes. 23 | // 24 | // Note that XTS is usually not appropriate for any use besides disk encryption. 25 | // Most users should use an AEAD mode like GCM (from crypto/cipher.NewGCM) instead. 26 | package _crypto 27 | 28 | import ( 29 | "crypto/cipher" 30 | "encoding/binary" 31 | "errors" 32 | "sync" 33 | "unsafe" 34 | ) 35 | 36 | // Cipher contains an expanded key structure. It is safe for concurrent use if 37 | // the underlying block cipher is safe for concurrent use. 
type Cipher struct {
	k1, k2 cipher.Block
}

// blockSize is the block size that the underlying cipher must have. XTS is
// only defined for 16-byte ciphers.
const blockSize = 16

// tweakPool recycles tweak buffers so Encrypt does not allocate per sector.
var tweakPool = sync.Pool{
	New: func() interface{} {
		return new([blockSize]byte)
	},
}

// NewCipher creates a Cipher given a function for creating the underlying
// block cipher (which must have a block size of 16 bytes). The key must be
// twice the length of the underlying cipher's key: the first half keys the
// data cipher (k1), the second half the tweak cipher (k2).
func NewCipher(cipherFunc func([]byte) (cipher.Block, error), key []byte) (c *Cipher, err error) {
	c = new(Cipher)
	if c.k1, err = cipherFunc(key[:len(key)/2]); err != nil {
		return nil, err
	}
	// BUG FIX: the original ignored a failure here and fell through to the
	// block-size check, which could overwrite (mask) the real error.
	if c.k2, err = cipherFunc(key[len(key)/2:]); err != nil {
		return nil, err
	}
	// BUG FIX: the original only validated k1's block size; both halves must
	// be 16-byte block ciphers for XTS.
	if c.k1.BlockSize() != blockSize || c.k2.BlockSize() != blockSize {
		return nil, errors.New("xts: cipher does not have a block size of 16")
	}
	return c, nil
}

// Encrypt encrypts a sector of plaintext and puts the result into ciphertext.
// Plaintext and ciphertext must overlap entirely or not at all.
// Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes.
72 | func (c *Cipher) Encrypt(ciphertext, plaintext []byte, sectorNum uint64) { 73 | if len(ciphertext) < len(plaintext) { 74 | panic("xts: ciphertext is smaller than plaintext") 75 | } 76 | if len(plaintext)%blockSize != 0 { 77 | panic("xts: plaintext is not a multiple of the block size") 78 | } 79 | if InexactOverlap(ciphertext[:len(plaintext)], plaintext) { 80 | panic("xts: invalid buffer overlap") 81 | } 82 | 83 | tweak := tweakPool.Get().(*[blockSize]byte) 84 | for i := range tweak { 85 | tweak[i] = 0 86 | } 87 | binary.LittleEndian.PutUint64(tweak[:8], sectorNum) 88 | 89 | c.k2.Encrypt(tweak[:], tweak[:]) 90 | 91 | for len(plaintext) > 0 { 92 | for j := range tweak { 93 | ciphertext[j] = plaintext[j] ^ tweak[j] 94 | } 95 | c.k1.Encrypt(ciphertext, ciphertext) 96 | for j := range tweak { 97 | ciphertext[j] ^= tweak[j] 98 | } 99 | plaintext = plaintext[blockSize:] 100 | ciphertext = ciphertext[blockSize:] 101 | 102 | mul2(tweak) 103 | } 104 | 105 | tweakPool.Put(tweak) 106 | } 107 | 108 | // Decrypt decrypts a sector of ciphertext and puts the result into plaintext. 109 | // Plaintext and ciphertext must overlap entirely or not at all. 110 | // Sectors must be a multiple of 16 bytes and less than 2²⁴ bytes. 
111 | func (c *Cipher) Decrypt(plaintext, ciphertext []byte, tweak *[16]byte) { 112 | if len(plaintext) < len(ciphertext) { 113 | panic("xts: plaintext is smaller than ciphertext") 114 | } 115 | if len(ciphertext)%blockSize != 0 { 116 | panic("xts: ciphertext is not a multiple of the block size") 117 | } 118 | if InexactOverlap(plaintext[:len(ciphertext)], ciphertext) { 119 | panic("xts: invalid buffer overlap") 120 | } 121 | 122 | /* 123 | tweak := tweakPool.Get().(*[blockSize]byte) 124 | for i := range tweak { 125 | tweak[i] = 0 126 | } 127 | binary.LittleEndian.PutUint64(tweak[:8], sectorNum) 128 | */ 129 | c.k2.Encrypt(tweak[:], tweak[:]) 130 | for len(ciphertext) > 0 { 131 | for j := range tweak { 132 | plaintext[j] = ciphertext[j] ^ tweak[j] 133 | } 134 | c.k1.Decrypt(plaintext, plaintext) 135 | for j := range tweak { 136 | plaintext[j] ^= tweak[j] 137 | } 138 | plaintext = plaintext[blockSize:] 139 | ciphertext = ciphertext[blockSize:] 140 | 141 | mul2(tweak) 142 | } 143 | 144 | //tweakPool.Put(tweak) 145 | } 146 | 147 | // mul2 multiplies tweak by 2 in GF(2¹²⁸) with an irreducible polynomial of 148 | // x¹²⁸ + x⁷ + x² + x + 1. 149 | func mul2(tweak *[blockSize]byte) { 150 | var carryIn byte 151 | for j := range tweak { 152 | carryOut := tweak[j] >> 7 153 | tweak[j] = (tweak[j] << 1) + carryIn 154 | carryIn = carryOut 155 | } 156 | if carryIn != 0 { 157 | // If we have a carry bit then we need to subtract a multiple 158 | // of the irreducible polynomial (x¹²⁸ + x⁷ + x² + x + 1). 159 | // By dropping the carry bit, we're subtracting the x^128 term 160 | // so all that remains is to subtract x⁷ + x² + x + 1. 161 | // Subtraction (and addition) in this representation is just 162 | // XOR. 
163 | tweak[0] ^= 1<<7 | 1<<2 | 1<<1 | 1 164 | } 165 | } 166 | 167 | func AnyOverlap(x, y []byte) bool { 168 | return len(x) > 0 && len(y) > 0 && 169 | uintptr(unsafe.Pointer(&x[0])) <= uintptr(unsafe.Pointer(&y[len(y)-1])) && 170 | uintptr(unsafe.Pointer(&y[0])) <= uintptr(unsafe.Pointer(&x[len(x)-1])) 171 | } 172 | 173 | // InexactOverlap reports whether x and y share memory at any non-corresponding 174 | // index. The memory beyond the slice length is ignored. Note that x and y can 175 | // have different lengths and still not have any inexact overlap. 176 | // 177 | // InexactOverlap can be used to implement the requirements of the crypto/cipher 178 | // AEAD, Block, BlockMode and Stream interfaces. 179 | func InexactOverlap(x, y []byte) bool { 180 | if len(x) == 0 || len(y) == 0 || &x[0] == &y[0] { 181 | return false 182 | } 183 | return AnyOverlap(x, y) 184 | } 185 | -------------------------------------------------------------------------------- /switchfs/cnmt.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "encoding/binary" 5 | "encoding/xml" 6 | "errors" 7 | "fmt" 8 | "strings" 9 | ) 10 | 11 | const ( 12 | ContentMetaType_SystemProgram = 1 13 | ContentMetaType_SystemData = 2 14 | ContentMetaType_SystemUpdate = 3 15 | ContentMetaType_BootImagePackage = 4 16 | ContentMetaType_BootImagePackageSafe = 5 17 | ContentMetaType_Application = 0x80 18 | ContentMetaType_Patch = 0x81 19 | ContentMetaType_AddOnContent = 0x82 20 | ContentMetaType_Delta = 0x83 21 | ) 22 | 23 | type Content struct { 24 | Text string `xml:",chardata"` 25 | Type string `xml:"Type"` 26 | ID string `xml:"Id"` 27 | Size string `xml:"Size"` 28 | Hash string `xml:"Hash"` 29 | KeyGeneration string `xml:"KeyGeneration"` 30 | } 31 | 32 | type ContentMetaAttributes struct { 33 | TitleId string `json:"title_id"` 34 | Version int `json:"version"` 35 | Type string `json:"type"` 36 | RequiredTitleVersion int 
`json:"required_title_version"` 37 | Contents map[string]Content 38 | Ncap *Nacp 39 | } 40 | 41 | type ContentMeta struct { 42 | XMLName xml.Name `xml:"ContentMeta"` 43 | Text string `xml:",chardata"` 44 | Type string `xml:"Type"` 45 | ID string `xml:"Id"` 46 | Version int `xml:"Version"` 47 | RequiredDownloadSystemVersion string `xml:"RequiredDownloadSystemVersion"` 48 | Content []struct { 49 | Text string `xml:",chardata"` 50 | Type string `xml:"Type"` 51 | ID string `xml:"Id"` 52 | Size string `xml:"Size"` 53 | Hash string `xml:"Hash"` 54 | KeyGeneration string `xml:"KeyGeneration"` 55 | } `xml:"Content"` 56 | Digest string `xml:"Digest"` 57 | KeyGenerationMin string `xml:"KeyGenerationMin"` 58 | RequiredSystemVersion string `xml:"RequiredSystemVersion"` 59 | OriginalId string `xml:"OriginalId"` 60 | } 61 | 62 | func readBinaryCnmt(pfs0 *PFS0, data []byte) (*ContentMetaAttributes, error) { 63 | if pfs0 == nil || len(pfs0.Files) != 1 { 64 | return nil, errors.New("unexpected pfs0") 65 | } 66 | cnmtFile := pfs0.Files[0] 67 | cnmt := data[int64(cnmtFile.StartOffset):] 68 | titleId := binary.LittleEndian.Uint64(cnmt[0:0x8]) 69 | version := binary.LittleEndian.Uint32(cnmt[0x8:0xC]) 70 | extendedHeaderSize := binary.LittleEndian.Uint16(cnmt[0xE:0x10]) 71 | extendedHeader := cnmt[0x20:0x20+extendedHeaderSize] 72 | requiredTitleVersion := binary.LittleEndian.Uint32(extendedHeader[0x8:0xC]) 73 | contentEntryCount := binary.LittleEndian.Uint16(cnmt[0x10:0x12]) 74 | //metaEntryCount := binary.LittleEndian.Uint16(cnmt[0x12:0x14]) 75 | contents := map[string]Content{} 76 | for i := uint16(0); i < contentEntryCount; i++ { 77 | position := 0x20 /*size of cnmt header*/ + extendedHeaderSize + (i * uint16(0x38)) 78 | ncaId := cnmt[position+0x20 : position+0x20+0x10] 79 | //fmt.Println(fmt.Sprintf("0%x", ncaId)) 80 | contentType := "" 81 | switch cnmt[position+0x36 : position+0x36+1][0] { 82 | case 0: 83 | contentType = "Meta" 84 | case 1: 85 | contentType = "Program" 86 | case 2: 
87 | contentType = "Data" 88 | case 3: 89 | contentType = "Control" 90 | case 4: 91 | contentType = "HtmlDocument" 92 | case 5: 93 | contentType = "LegalInformation" 94 | case 6: 95 | contentType = "DeltaFragment" 96 | } 97 | contents[contentType] = Content{ID: fmt.Sprintf("%x", ncaId)} 98 | } 99 | metaType := "" 100 | switch cnmt[0xC:0xD][0] { 101 | case ContentMetaType_Application: 102 | metaType = "BASE" 103 | case ContentMetaType_AddOnContent: 104 | metaType = "DLC" 105 | case ContentMetaType_Patch: 106 | metaType = "UPD" 107 | } 108 | 109 | return &ContentMetaAttributes{Contents: contents, Version: int(version), TitleId: fmt.Sprintf("0%x", titleId), Type: metaType, RequiredTitleVersion: int(requiredTitleVersion)}, nil 110 | } 111 | 112 | func readXmlCnmt(xmlBytes []byte) (*ContentMetaAttributes, error) { 113 | cmt := &ContentMeta{} 114 | err := xml.Unmarshal(xmlBytes, &cmt) 115 | if err != nil { 116 | return nil, err 117 | } 118 | titleId := strings.Replace(cmt.ID, "0x", "", 1) 119 | return &ContentMetaAttributes{Version: cmt.Version, TitleId: titleId, Type: cmt.Type}, nil 120 | } 121 | -------------------------------------------------------------------------------- /switchfs/fs.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "bytes" 5 | "crypto/sha256" 6 | "encoding/binary" 7 | "errors" 8 | ) 9 | 10 | type fsHeader struct { 11 | encType byte //(0 = Auto, 1 = None, 2 = AesCtrOld, 3 = AesCtr, 4 = AesCtrEx) 12 | fsType byte //(0 = RomFs, 1 = PartitionFs) 13 | hashType byte // (0 = Auto, 2 = HierarchicalSha256, 3 = HierarchicalIntegrity (Ivfc)) 14 | fsHeaderBytes []byte 15 | generation uint32 16 | } 17 | 18 | type fsEntry struct { 19 | StartOffset uint32 20 | EndOffset uint32 21 | Size uint32 22 | } 23 | 24 | type hashInfo struct { 25 | pfs0HeaderOffset uint64 26 | pfs0size uint64 27 | } 28 | 29 | func getFsEntry(ncaHeader *ncaHeader, index int) fsEntry { 30 | fsEntryOffset := 0x240 + 
0x10*index 31 | fsEntryBytes := ncaHeader.headerBytes[fsEntryOffset : fsEntryOffset+0x10] 32 | 33 | entryStartOffset := binary.LittleEndian.Uint32(fsEntryBytes[0x0:0x4]) * 0x200 34 | entryEndOffset := binary.LittleEndian.Uint32(fsEntryBytes[0x4:0x8]) * 0x200 35 | 36 | return fsEntry{StartOffset: entryStartOffset, EndOffset: entryEndOffset, Size: entryEndOffset - entryStartOffset} 37 | } 38 | 39 | func getFsHeader(ncaHeader *ncaHeader, index int) (*fsHeader, error) { 40 | fsHeaderHashOffset := /*hash pfs0HeaderOffset*/ 0x280 + /*hash pfs0size*/ 0x20*index 41 | fsHeaderHash := ncaHeader.headerBytes[fsHeaderHashOffset : fsHeaderHashOffset+0x20] 42 | 43 | fsHeaderOffset := 0x400 + 0x200*index 44 | fsHeaderBytes := ncaHeader.headerBytes[fsHeaderOffset : fsHeaderOffset+0x200] 45 | 46 | actualHash := sha256.Sum256(fsHeaderBytes) 47 | 48 | if bytes.Compare(actualHash[:], fsHeaderHash) != 0 { 49 | return nil, errors.New("fs headerBytes hash mismatch") 50 | } 51 | 52 | result := fsHeader{fsHeaderBytes: fsHeaderBytes} 53 | 54 | result.fsType = fsHeaderBytes[0x2:0x3][0] 55 | result.hashType = fsHeaderBytes[0x3:0x4][0] 56 | result.encType = fsHeaderBytes[0x4:0x5][0] 57 | 58 | generationBytes := fsHeaderBytes[0x140 : 0x140+0x4] //generation 59 | result.generation = binary.LittleEndian.Uint32(generationBytes) 60 | 61 | return &result, nil 62 | } 63 | 64 | func (fh *fsHeader) getHashInfo() (*hashInfo, error) { 65 | hashInfoBytes := fh.fsHeaderBytes[0x8:0x100] 66 | result := hashInfo{} 67 | if fh.hashType == 2 { 68 | 69 | result.pfs0HeaderOffset = binary.LittleEndian.Uint64(hashInfoBytes[0x38 : 0x38+0x8]) 70 | result.pfs0size = binary.LittleEndian.Uint64(hashInfoBytes[0x40 : 0x40+0x8]) 71 | return &result, nil 72 | } else if fh.hashType == 3 { 73 | result.pfs0HeaderOffset = binary.LittleEndian.Uint64(hashInfoBytes[0x88 : 0x88+0x8]) 74 | result.pfs0size = binary.LittleEndian.Uint64(hashInfoBytes[0x90 : 0x90+0x8]) 75 | return &result, nil 76 | } 77 | return nil, errors.New("non 
supported hash type") 78 | } 79 | -------------------------------------------------------------------------------- /switchfs/nacp.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "encoding/binary" 5 | "errors" 6 | "io" 7 | ) 8 | 9 | type Language int 10 | 11 | const ( 12 | AmericanEnglish = iota 13 | BritishEnglish 14 | Japanese 15 | French 16 | German 17 | LatinAmericanSpanish 18 | Spanish 19 | Italian 20 | Dutch 21 | CanadianFrench 22 | Portuguese 23 | Russian 24 | Korean 25 | Taiwanese 26 | Chinese 27 | ) 28 | 29 | type NacpTitle struct { 30 | Language Language 31 | Title string 32 | } 33 | 34 | type Nacp struct { 35 | TitleName map[string]NacpTitle 36 | Isbn string 37 | DisplayVersion string 38 | SupportedLanguageFlag uint32 39 | } 40 | 41 | func (l Language) String() string { 42 | return [...]string{ 43 | "AmericanEnglish", 44 | "BritishEnglish", 45 | "Japanese", 46 | "French", 47 | "German", 48 | "LatinAmericanSpanish", 49 | "Spanish", 50 | "Italian", 51 | "Dutch", 52 | "CanadianFrench", 53 | "Portuguese", 54 | "Russian", 55 | "Korean", 56 | "Taiwanese", 57 | "Chinese", 58 | "Chinese"}[l] 59 | } 60 | 61 | func (l Language) ToLanguageTag() string { 62 | return [...]string{ 63 | "en-US", 64 | "en-GB", 65 | "ja", 66 | "fr", 67 | "de", 68 | "es-419", 69 | "es", 70 | "it", 71 | "nl", 72 | "fr-CA", 73 | "pt", 74 | "ru", 75 | "ko", 76 | "zh-TW", 77 | "zh-Hant", 78 | "zh-Hans"}[l] 79 | } 80 | 81 | func ExtractNacp(cnmt *ContentMetaAttributes, file io.ReaderAt, securePartition *PFS0, securePartitionOffset int64) (*Nacp, error) { 82 | if control, ok := cnmt.Contents["Control"]; ok { 83 | controlNca := getNcaById(securePartition, control.ID) 84 | if controlNca != nil { 85 | fsHeader, section, err := openMetaNcaDataSection(file, securePartitionOffset+int64(controlNca.StartOffset)) 86 | if err != nil { 87 | return nil, err 88 | } 89 | if fsHeader.fsType == 0 { 90 | romFsHeader, err := 
readRomfsHeader(section) 91 | if err != nil { 92 | return nil, err 93 | } 94 | fEntries, err := readRomfsFileEntry(section, romFsHeader) 95 | if err != nil { 96 | return nil, err 97 | } 98 | 99 | if entry, ok := fEntries["control.nacp"]; ok { 100 | nacp, err := readNacp(section, romFsHeader, entry) 101 | if err != nil { 102 | return nil, err 103 | } 104 | return &nacp, nil 105 | } 106 | } else { 107 | return nil, errors.New("unsupported type " + control.ID) 108 | } 109 | } else { 110 | return nil, errors.New("unable to find control.nacp by id " + control.ID) 111 | } 112 | 113 | } 114 | return nil, errors.New("no control.nacp found") 115 | } 116 | 117 | /*https://switchbrew.org/wiki/NACP_Format*/ 118 | func readNacp(data []byte, romFsHeader RomfsHeader, fileEntry RomfsFileEntry) (Nacp, error) { 119 | offset := romFsHeader.DataOffset + fileEntry.offset 120 | titles := map[string]NacpTitle{} 121 | for i := 0; i < 16; i++ { 122 | //lang := i 123 | appTitleBytes := data[offset+(uint64(i)*0x300) : offset+(uint64(i)*0x300)+0x200] 124 | nameBytes := readBytesUntilZero(appTitleBytes) 125 | titles[Language(i).String()] = NacpTitle{Language: Language(i), Title: string(nameBytes)} 126 | } 127 | 128 | isbn := readBytesUntilZero(data[offset+0x3000 : offset+0x3000+0x25]) 129 | displayVersion := readBytesUntilZero(data[offset+0x3060 : offset+0x3060+0x10]) 130 | supportedLanguageFlag := binary.BigEndian.Uint32(data[offset+0x302C : offset+0x302C+0x4]) 131 | 132 | return Nacp{TitleName: titles, Isbn: string(isbn), DisplayVersion: string(displayVersion), SupportedLanguageFlag: supportedLanguageFlag}, nil 133 | /* 134 | 135 | 136 | Isbn = reader.ReadUtf8Z(37); 137 | reader.BaseStream.Position = start + 0x3025; 138 | StartupUserAccount = reader.ReadByte(); 139 | UserAccountSwitchLock = reader.ReadByte(); 140 | AocRegistrationType = reader.ReadByte(); 141 | AttributeFlag = reader.ReadInt32(); 142 | supportedLanguageFlag = reader.ReadUInt32(); 143 | ParentalControlFlag = 
reader.ReadUInt32(); 144 | Screenshot = reader.ReadByte(); 145 | VideoCapture = reader.ReadByte(); 146 | DataLossConfirmation = reader.ReadByte(); 147 | PlayLogPolicy = reader.ReadByte(); 148 | PresenceGroupId = reader.ReadUInt64(); 149 | 150 | for (int i = 0; i < RatingAge.Length; i++) 151 | { 152 | RatingAge[i] = reader.ReadSByte(); 153 | } 154 | 155 | DisplayVersion = reader.ReadUtf8Z(16); 156 | reader.BaseStream.Position = start + 0x3070; 157 | AddOnContentBaseId = reader.ReadUInt64(); 158 | SaveDataOwnerId = reader.ReadUInt64(); 159 | UserAccountSaveDataSize = reader.ReadInt64(); 160 | UserAccountSaveDataJournalSize = reader.ReadInt64(); 161 | DeviceSaveDataSize = reader.ReadInt64(); 162 | DeviceSaveDataJournalSize = reader.ReadInt64(); 163 | BcatDeliveryCacheStorageSize = reader.ReadInt64(); 164 | ApplicationErrorCodeCategory = reader.ReadUtf8Z(8); 165 | reader.BaseStream.Position = start + 0x30B0; 166 | 167 | for (int i = 0; i < LocalCommunicationId.Length; i++) 168 | { 169 | LocalCommunicationId[i] = reader.ReadUInt64(); 170 | } 171 | 172 | LogoType = reader.ReadByte(); 173 | LogoHandling = reader.ReadByte(); 174 | RuntimeAddOnContentInstall = reader.ReadByte(); 175 | Reserved00 = reader.ReadBytes(3); 176 | CrashReport = reader.ReadByte(); 177 | Hdcp = reader.ReadByte(); 178 | SeedForPseudoDeviceId = reader.ReadUInt64(); 179 | BcatPassphrase = reader.ReadUtf8Z(65); 180 | 181 | reader.BaseStream.Position = start + 0x3141; 182 | Reserved01 = reader.ReadByte(); 183 | Reserved02 = reader.ReadBytes(6); 184 | 185 | UserAccountSaveDataSizeMax = reader.ReadInt64(); 186 | UserAccountSaveDataJournalSizeMax = reader.ReadInt64(); 187 | DeviceSaveDataSizeMax = reader.ReadInt64(); 188 | DeviceSaveDataJournalSizeMax = reader.ReadInt64(); 189 | TemporaryStorageSize = reader.ReadInt64(); 190 | CacheStorageSize = reader.ReadInt64(); 191 | CacheStorageJournalSize = reader.ReadInt64(); 192 | CacheStorageDataAndJournalSizeMax = reader.ReadInt64(); 193 | CacheStorageIndex = 
reader.ReadInt16(); 194 | Reserved03 = reader.ReadBytes(6); 195 | 196 | for (int i = 0; i < 16; i++) 197 | { 198 | ulong value = reader.ReadUInt64(); 199 | if (value != 0) PlayLogQueryableApplicationId.Add(value); 200 | } 201 | 202 | PlayLogQueryCapability = reader.ReadByte(); 203 | RepairFlag = reader.ReadByte(); 204 | ProgramIndex = reader.ReadByte(); 205 | 206 | UserTotalSaveDataSize = UserAccountSaveDataSize + UserAccountSaveDataJournalSize; 207 | DeviceTotalSaveDataSize = DeviceSaveDataSize + DeviceSaveDataJournalSize; 208 | TotalSaveDataSize = UserTotalSaveDataSize + DeviceTotalSaveDataSize; 209 | */ 210 | } 211 | 212 | func readBytesUntilZero(appTitleBytes []byte) []byte { 213 | var nameBytes []byte 214 | for _, b := range appTitleBytes { 215 | if b == 0x0 { 216 | break 217 | } else { 218 | nameBytes = append(nameBytes, b) 219 | } 220 | } 221 | return nameBytes 222 | } 223 | -------------------------------------------------------------------------------- /switchfs/nca.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "crypto/aes" 5 | "crypto/cipher" 6 | "encoding/binary" 7 | "encoding/hex" 8 | "errors" 9 | "fmt" 10 | "github.com/dtrunk90/switch-library-manager-web/settings" 11 | "github.com/dtrunk90/switch-library-manager-web/switchfs/_crypto" 12 | "io" 13 | ) 14 | 15 | const ( 16 | NcaSectionType_Code = iota 17 | NcaSectionType_Data 18 | NcaSectionType_Logo 19 | ) 20 | const ( 21 | NcaContentType_Program = iota 22 | NcaContentType_Meta 23 | NcaContentType_Control 24 | NcaContentType_Manual 25 | NcaContentType_Data 26 | NcaContentType_PublicData 27 | ) 28 | 29 | func openMetaNcaDataSection(reader io.ReaderAt, ncaOffset int64) (*fsHeader, []byte, error) { 30 | //read the NCA headerBytes 31 | encNcaHeader := make([]byte, 0xC00) 32 | n, err := reader.ReadAt(encNcaHeader, ncaOffset) 33 | 34 | if err != nil { 35 | return nil, nil, errors.New("failed to read NCA header " + err.Error()) 36 
| } 37 | if n != 0xC00 { 38 | return nil, nil, errors.New("failed to read NCA header") 39 | } 40 | 41 | keys, err := settings.SwitchKeys() 42 | if err != nil { 43 | return nil, nil, err 44 | } 45 | headerKey := keys.GetKey("header_key") 46 | if headerKey == "" { 47 | return nil, nil, errors.New("missing key - header_key") 48 | } 49 | ncaHeader, err := DecryptNcaHeader(headerKey, encNcaHeader) 50 | if err != nil { 51 | return nil, nil, err 52 | } 53 | 54 | if ncaHeader.HasRightsId() { 55 | //fail - need title keys 56 | return nil, nil, errors.New("non standard encryption is not supported") 57 | } 58 | 59 | /*if ncaHeader.contentType != NcaContentType_Meta { 60 | return nil, errors.New("not a meta NCA") 61 | }*/ 62 | 63 | dataSectionIndex := 0 64 | 65 | fsHeader, err := getFsHeader(ncaHeader, dataSectionIndex) 66 | if err != nil { 67 | return nil, nil, err 68 | } 69 | 70 | entry := getFsEntry(ncaHeader, dataSectionIndex) 71 | 72 | if entry.Size == 0 { 73 | return nil, nil, errors.New("empty section") 74 | } 75 | 76 | encodedEntryContent := make([]byte, entry.Size) 77 | entryOffset := ncaOffset + int64(entry.StartOffset) 78 | _, err = reader.ReadAt(encodedEntryContent, entryOffset) 79 | if err != nil { 80 | return nil, nil, err 81 | } 82 | if fsHeader.encType != 3 { 83 | return nil, nil, errors.New("non supported encryption type [encryption type:" + string(fsHeader.encType)) 84 | } 85 | 86 | /*if fsHeader.hashType != 2 { //Sha256 (FS_TYPE_PFS0) 87 | return nil, errors.New("non FS_TYPE_PFS0") 88 | }*/ 89 | decoded, err := decryptAesCtr(ncaHeader, fsHeader, entry.StartOffset, entry.Size, encodedEntryContent) 90 | if err != nil { 91 | return nil, nil, err 92 | } 93 | hashInfo, err := fsHeader.getHashInfo() 94 | if err != nil { 95 | return nil, nil, err 96 | } 97 | 98 | return fsHeader, decoded[hashInfo.pfs0HeaderOffset:], nil 99 | } 100 | 101 | func decryptAesCtr(ncaHeader *ncaHeader, fsHeader *fsHeader, offset uint32, size uint32, encoded []byte) ([]byte, error) { 102 | 
keyRevision := ncaHeader.getKeyRevision() 103 | cryptoType := ncaHeader.cryptoType 104 | 105 | if cryptoType != 0 { 106 | return []byte{}, errors.New("unsupported crypto type") 107 | } 108 | 109 | keys, _ := settings.SwitchKeys() 110 | 111 | keyName := fmt.Sprintf("key_area_key_application_0%x", keyRevision) 112 | KeyString := keys.GetKey(keyName) 113 | if KeyString == "" { 114 | return nil, errors.New(fmt.Sprintf("missing Key_area_key[%v]", keyName)) 115 | } 116 | key, _ := hex.DecodeString(KeyString) 117 | 118 | decKey := _crypto.DecryptAes128Ecb(ncaHeader.encryptedKeys[0x20:0x30], key) 119 | 120 | counter := make([]byte, 0x10) 121 | binary.BigEndian.PutUint64(counter, uint64(fsHeader.generation)) 122 | binary.BigEndian.PutUint64(counter[8:], uint64(offset/0x10)) 123 | 124 | c, _ := aes.NewCipher(decKey) 125 | 126 | decContent := make([]byte, size) 127 | 128 | s := cipher.NewCTR(c, counter) 129 | s.XORKeyStream(decContent, encoded[0:size]) 130 | 131 | return decContent, nil 132 | } 133 | -------------------------------------------------------------------------------- /switchfs/ncaHeader.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "crypto/aes" 5 | "encoding/binary" 6 | "encoding/hex" 7 | "github.com/dtrunk90/switch-library-manager-web/switchfs/_crypto" 8 | "strconv" 9 | ) 10 | 11 | //https://switchbrew.org/wiki/NCA_Format 12 | 13 | type ncaHeader struct { 14 | headerBytes []byte 15 | rightsId []byte 16 | titleId []byte 17 | distribution byte 18 | contentType byte // (0x00 = Program, 0x01 = Meta, 0x02 = Control, 0x03 = Manual, 0x04 = Data, 0x05 = PublicData) 19 | keyGeneration2 byte 20 | keyGeneration1 byte 21 | encryptedKeys []byte // 4 * 0x10 22 | cryptoType byte //(0x00 = Application, 0x01 = Ocean, 0x02 = System) 23 | 24 | } 25 | 26 | func (n *ncaHeader) HasRightsId() bool { 27 | for i := 0; i < 0x10; i++ { 28 | if n.rightsId[i] != 0 { 29 | return true 30 | } 31 | } 32 | return 
false 33 | } 34 | 35 | func (n *ncaHeader) getKeyRevision() int { 36 | keyGeneration := max(n.keyGeneration1, n.keyGeneration2) 37 | keyRevision := keyGeneration - 1 38 | if keyGeneration == 0 { 39 | keyRevision = 0 40 | } 41 | return int(keyRevision) 42 | } 43 | 44 | func max(a byte, b byte) byte { 45 | if a > b { 46 | return a 47 | } 48 | return b 49 | } 50 | 51 | func DecryptNcaHeader(key string, encHeader []byte) (*ncaHeader, error) { 52 | headerKey, _ := hex.DecodeString(key) 53 | c, err := _crypto.NewCipher(aes.NewCipher, headerKey) 54 | if err != nil { 55 | return nil, err 56 | } 57 | sector := 0 58 | sectorSize := 0x200 59 | endOffset := 0x400 60 | decryptNcaHeader, err := _decryptNcaHeader(c, encHeader, endOffset, sectorSize, sector) 61 | if err != nil { 62 | return nil, err 63 | } 64 | 65 | magic := string(decryptNcaHeader[0x200:0x204]) 66 | 67 | if magic == "NCA3" { 68 | endOffset = 0xC00 69 | decryptNcaHeader, err = _decryptNcaHeader(c, encHeader, endOffset, sectorSize, sector) 70 | } 71 | 72 | result := ncaHeader{headerBytes: decryptNcaHeader} 73 | 74 | result.distribution = decryptNcaHeader[0x204:0x205][0] 75 | result.contentType = decryptNcaHeader[0x205:0x206][0] 76 | result.rightsId = decryptNcaHeader[0x230 : 0x230+0x10] 77 | 78 | title_id_dec := binary.LittleEndian.Uint64(decryptNcaHeader[0x210 : 0x210+0x8]) 79 | result.titleId = []byte(strconv.FormatInt(int64(title_id_dec), 16)) 80 | result.keyGeneration1 = decryptNcaHeader[0x206:0x207][0] 81 | result.keyGeneration2 = decryptNcaHeader[0x220:0x221][0] 82 | 83 | encryptedKeysAreaOffset := 0x300 84 | result.encryptedKeys = decryptNcaHeader[encryptedKeysAreaOffset : encryptedKeysAreaOffset+(0x10*4)] 85 | 86 | result.cryptoType = decryptNcaHeader[0x207:0x208][0] 87 | 88 | return &result, nil 89 | } 90 | 91 | func _decryptNcaHeader(c *_crypto.Cipher, header []byte, end int, sectorSize int, sectorNum int) ([]byte, error) { 92 | decrypted := make([]byte, len(header)) 93 | for pos := 0; pos < end; pos += 
sectorSize { 94 | /* Workaround for Nintendo's custom sector...manually generate the tweak. */ 95 | tweak := getNintendoTweak(sectorNum) 96 | 97 | pos := sectorSize * sectorNum 98 | c.Decrypt(decrypted[pos:pos+sectorSize], header[pos:pos+sectorSize], &tweak) 99 | sectorNum++ 100 | } 101 | return decrypted, nil 102 | } 103 | 104 | func getNintendoTweak(sector int) [16]byte { 105 | tweak := [16]byte{} 106 | for i := 0xF; i >= 0; i-- { /* Nintendo LE custom tweak... */ 107 | tweak[i] = uint8(sector & 0xFF) 108 | sector >>= 8 109 | } 110 | return tweak 111 | } 112 | -------------------------------------------------------------------------------- /switchfs/nsp.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "go.uber.org/zap" 7 | "strings" 8 | ) 9 | 10 | func ReadNspMetadata(filePath string) (map[string]*ContentMetaAttributes, error) { 11 | 12 | pfs0, err := ReadPfs0File(filePath) 13 | if err != nil { 14 | return nil, errors.New("Invalid NSP file, reason - [" + err.Error() + "]") 15 | } 16 | 17 | file, err := OpenFile(filePath) 18 | if err != nil { 19 | return nil, err 20 | } 21 | 22 | defer file.Close() 23 | 24 | contentMap := map[string]*ContentMetaAttributes{} 25 | 26 | for _, pfs0File := range pfs0.Files { 27 | 28 | fileOffset := int64(pfs0File.StartOffset) 29 | 30 | if strings.Contains(pfs0File.Name, "cnmt.nca") { 31 | _, section, err := openMetaNcaDataSection(file, fileOffset) 32 | if err != nil { 33 | return nil, err 34 | } 35 | currpfs0, err := readPfs0(bytes.NewReader(section), 0x0) 36 | if err != nil { 37 | return nil, err 38 | } 39 | currCnmt, err := readBinaryCnmt(currpfs0, section) 40 | if err != nil { 41 | return nil, err 42 | } 43 | if currCnmt.Type != "DLC" { 44 | nacp, err := ExtractNacp(currCnmt, file, pfs0, 0) 45 | if err != nil { 46 | zap.S().Debug("Failed to extract nacp [%v]\n", err.Error()) 47 | } 48 | currCnmt.Ncap = nacp 49 | } 50 | 51 | 
contentMap[currCnmt.TitleId] = currCnmt 52 | 53 | } /*else if strings.Contains(pfs0File.Name, ".cnmt.xml") { 54 | xmlBytes := make([]byte, pfs0File.Size) 55 | _, err = file.ReadAt(xmlBytes, fileOffset) 56 | if err != nil { 57 | return nil, err 58 | } 59 | 60 | currCnmt, err := readXmlCnmt(xmlBytes) 61 | if err != nil { 62 | return nil, err 63 | } 64 | contentMap[currCnmt.TitleId] = currCnmt 65 | }*/ 66 | } 67 | return contentMap, nil 68 | 69 | } 70 | -------------------------------------------------------------------------------- /switchfs/pfs0.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "encoding/binary" 5 | "errors" 6 | "io" 7 | ) 8 | 9 | const ( 10 | PfsfileEntryTableSize = 0x18 11 | HfsfileEntryTableSize = 0x40 12 | pfs0Magic = "PFS0" 13 | hfs0Magic = "HFS0" 14 | ) 15 | 16 | type fileEntry struct { 17 | StartOffset uint64 18 | Size uint64 19 | Name string 20 | } 21 | 22 | // PFS0 struct to represent PFS0 filesystem of NSP 23 | type PFS0 struct { 24 | Filepath string 25 | Size uint64 26 | HeaderLen uint16 27 | Files []fileEntry 28 | } 29 | 30 | // https://wiki.oatmealdome.me/PFS0_(File_Format) 31 | func ReadPfs0File(filePath string) (*PFS0, error) { 32 | 33 | file, err := OpenFile(filePath) 34 | if err != nil { 35 | return nil, err 36 | } 37 | 38 | defer file.Close() 39 | 40 | p, err := readPfs0(file, 0x0) 41 | if err != nil { 42 | return nil, err 43 | } 44 | p.Filepath = filePath 45 | return p, nil 46 | } 47 | 48 | func readPfs0(reader io.ReaderAt, offset int64) (*PFS0, error) { 49 | 50 | header := make([]byte, 0xC) 51 | n, err := reader.ReadAt(header, offset) 52 | if err != nil { 53 | return nil, err 54 | } 55 | if n != 0xC { 56 | return nil, errors.New("failed to read file") 57 | } 58 | var fileEntryTableSize uint16 59 | if string(header[:0x4]) == pfs0Magic { 60 | fileEntryTableSize = PfsfileEntryTableSize 61 | } else if string(header[:0x4]) == hfs0Magic { 62 | 
fileEntryTableSize = HfsfileEntryTableSize 63 | } else { 64 | return nil, errors.New("Invalid NSP headerBytes. Expected 'PFS0'/'HFS0', got '" + string(header[:0x4]) + "'") 65 | } 66 | p := &PFS0{} 67 | 68 | fileCount := binary.LittleEndian.Uint16(header[0x4:0x8]) 69 | 70 | fileEntryTableOffset := 0x10 + (fileEntryTableSize * fileCount) 71 | 72 | stringsLen := binary.LittleEndian.Uint16(header[0x8:0xC]) 73 | p.HeaderLen = fileEntryTableOffset + stringsLen 74 | fileNamesBuffer := make([]byte, stringsLen) 75 | _, err = reader.ReadAt(fileNamesBuffer, offset+int64(fileEntryTableOffset)) 76 | if err != nil { 77 | return nil, err 78 | } 79 | 80 | p.Files = make([]fileEntry, fileCount) 81 | // go over the fileEntries 82 | for i := uint16(0); i < fileCount; i++ { 83 | fileEntryTable := make([]byte, fileEntryTableSize) 84 | _, err = reader.ReadAt(fileEntryTable, offset+int64(0x10+(fileEntryTableSize*i))) 85 | if err != nil { 86 | return nil, err 87 | } 88 | 89 | fileOffset := binary.LittleEndian.Uint64(fileEntryTable[0:8]) 90 | fileSize := binary.LittleEndian.Uint64(fileEntryTable[8:16]) 91 | var nameBytes []byte 92 | for _, b := range fileNamesBuffer[binary.LittleEndian.Uint16(fileEntryTable[16:20]):] { 93 | if b == 0x0 { 94 | break 95 | } else { 96 | nameBytes = append(nameBytes, b) 97 | } 98 | } 99 | 100 | p.Files[i] = fileEntry{fileOffset + uint64(p.HeaderLen), fileSize, string(nameBytes)} 101 | } 102 | p.HeaderLen += stringsLen 103 | 104 | return p, nil 105 | } 106 | -------------------------------------------------------------------------------- /switchfs/romfs.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "encoding/binary" 5 | "errors" 6 | ) 7 | 8 | type RomfsHeader struct { 9 | HeaderSize uint64 10 | DirHashTableOffset uint64 11 | DirHashTableSize uint64 12 | DirMetaTableOffset uint64 13 | DirMetaTableSize uint64 14 | FileHashTableOffset uint64 15 | FileHashTableSize uint64 16 | 
FileMetaTableOffset uint64 17 | FileMetaTableSize uint64 18 | DataOffset uint64 19 | } 20 | 21 | type RomfsFileEntry struct { 22 | parent uint32 23 | sibling uint32 24 | offset uint64 25 | size uint64 26 | hash uint32 27 | name_size uint32 28 | name string 29 | } 30 | 31 | func readRomfsHeader(data []byte) (RomfsHeader, error) { 32 | header := RomfsHeader{} 33 | header.HeaderSize = binary.LittleEndian.Uint64(data[0x0+(0x8*0) : 0x0+(0x8*1)]) 34 | header.DirHashTableOffset = binary.LittleEndian.Uint64(data[0x0+(0x8*1) : 0x0+(0x8*2)]) 35 | header.DirHashTableSize = binary.LittleEndian.Uint64(data[0x0+(0x8*2) : 0x0+(0x8*3)]) 36 | header.DirMetaTableOffset = binary.LittleEndian.Uint64(data[0x0+(0x8*3) : 0x0+(0x8*4)]) 37 | header.DirMetaTableSize = binary.LittleEndian.Uint64(data[0x0+(0x8*4) : 0x0+(0x8*5)]) 38 | header.FileHashTableOffset = binary.LittleEndian.Uint64(data[0x0+(0x8*5) : 0x0+(0x8*6)]) 39 | header.FileHashTableSize = binary.LittleEndian.Uint64(data[0x0+(0x8*6) : 0x0+(0x8*7)]) 40 | header.FileMetaTableOffset = binary.LittleEndian.Uint64(data[0x0+(0x8*7) : 0x0+(0x8*8)]) 41 | header.FileMetaTableSize = binary.LittleEndian.Uint64(data[0x0+(0x8*8) : 0x0+(0x8*9)]) 42 | header.DataOffset = binary.LittleEndian.Uint64(data[0x0+(0x8*9) : 0x0+(0x8*10)]) 43 | return header, nil 44 | } 45 | 46 | func readRomfsFileEntry(data []byte, header RomfsHeader) (map[string]RomfsFileEntry, error) { 47 | if header.FileMetaTableOffset+header.FileMetaTableSize > uint64(len(data)) { 48 | return nil, errors.New("failed to read romfs") 49 | } 50 | dirBytes := data[header.FileMetaTableOffset : header.FileMetaTableOffset+header.FileMetaTableSize] 51 | result := map[string]RomfsFileEntry{} 52 | offset := uint32(0x0) 53 | for offset < uint32(header.FileHashTableSize) { 54 | entry := RomfsFileEntry{} 55 | entry.parent = binary.LittleEndian.Uint32(dirBytes[offset : offset+0x4]) 56 | entry.sibling = binary.LittleEndian.Uint32(dirBytes[offset+0x4 : offset+0x8]) 57 | entry.offset = 
binary.LittleEndian.Uint64(dirBytes[offset+0x8 : offset+0x10]) 58 | entry.size = binary.LittleEndian.Uint64(dirBytes[offset+0x10 : offset+0x18]) 59 | entry.hash = binary.LittleEndian.Uint32(dirBytes[offset+0x18 : offset+0x1C]) 60 | entry.name_size = binary.LittleEndian.Uint32(dirBytes[offset+0x1C : offset+0x20]) 61 | entry.name = string(dirBytes[offset+0x20 : (offset+0x20)+entry.name_size]) 62 | result[entry.name] = entry 63 | offset = offset + 0x20 + entry.name_size 64 | } 65 | return result, nil 66 | 67 | //fmt.Println(string(section[DataOffset+offset+0x3060:DataOffset+offset+0x3060 +0x10])) 68 | } 69 | -------------------------------------------------------------------------------- /switchfs/splitFileReader.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "errors" 5 | "github.com/avast/retry-go" 6 | "io" 7 | "io/ioutil" 8 | "os" 9 | "path" 10 | "strconv" 11 | "strings" 12 | ) 13 | 14 | type ReadAtCloser interface { 15 | io.ReaderAt 16 | io.Closer 17 | } 18 | 19 | type splitFile struct { 20 | info []os.FileInfo 21 | files []ReadAtCloser 22 | path string 23 | chunkSize int64 24 | } 25 | 26 | type fileWrapper struct { 27 | file ReadAtCloser 28 | path string 29 | } 30 | 31 | func NewFileWrapper(filePath string) (*fileWrapper, error) { 32 | result := fileWrapper{} 33 | result.path = filePath 34 | file, err := _openFile(filePath) 35 | if err != nil { 36 | return nil, err 37 | } 38 | result.file = file 39 | return &result, nil 40 | } 41 | 42 | func (sp *fileWrapper) ReadAt(p []byte, off int64) (n int, err error) { 43 | if sp.file != nil { 44 | return sp.file.ReadAt(p, off) 45 | } 46 | return 0, errors.New("file is not opened") 47 | } 48 | 49 | func (sp *fileWrapper) Close() error { 50 | 51 | if sp.file != nil { 52 | return sp.file.Close() 53 | } 54 | 55 | return nil 56 | } 57 | 58 | func NewSplitFileReader(filePath string) (*splitFile, error) { 59 | result := splitFile{} 60 | index := 
strings.LastIndex(filePath, string(os.PathSeparator)) 61 | splitFileFolder := filePath[:index] 62 | files, err := ioutil.ReadDir(splitFileFolder) 63 | if err != nil { 64 | return nil, err 65 | } 66 | result.path = splitFileFolder 67 | result.chunkSize = files[0].Size() 68 | result.info = make([]os.FileInfo, 0, len(files)) 69 | result.files = make([]ReadAtCloser, len(files)) 70 | for _, file := range files { 71 | if _, err := strconv.Atoi(file.Name()[len(file.Name())-1:]); err == nil { 72 | result.info = append(result.info, file) 73 | } 74 | } 75 | return &result, nil 76 | } 77 | 78 | func (sp *splitFile) ReadAt(p []byte, off int64) (n int, err error) { 79 | //calculate the part containing the offset 80 | part := int(off / sp.chunkSize) 81 | 82 | if len(sp.info) < part { 83 | return 0, errors.New("missing part " + strconv.Itoa(part)) 84 | } 85 | 86 | if len(sp.files) == 0 || sp.files[part] == nil { 87 | file, _ := _openFile(path.Join(sp.path, sp.info[part].Name())) 88 | sp.files[part] = file 89 | } 90 | off = off - sp.chunkSize*int64(part) 91 | 92 | if off < 0 || off > sp.info[part].Size() { 93 | return 0, errors.New("offset is out of bounds") 94 | } 95 | return sp.files[part].ReadAt(p, off) 96 | } 97 | 98 | func _openFile(path string) (*os.File, error) { 99 | var file *os.File 100 | var err error 101 | retry.Attempts(5) 102 | err = retry.Do( 103 | func() error { 104 | file, err = os.Open(path) 105 | return err 106 | }, 107 | ) 108 | return file, err 109 | } 110 | 111 | func (sp *splitFile) Close() error { 112 | for _, file := range sp.files { 113 | if file != nil { 114 | file.Close() 115 | } 116 | } 117 | return nil 118 | } 119 | 120 | func OpenFile(filePath string) (ReadAtCloser, error) { 121 | //check if it's a split file 122 | if _, err := strconv.Atoi(filePath[len(filePath)-1:]); err == nil { 123 | return NewSplitFileReader(filePath) 124 | } else { 125 | return NewFileWrapper(filePath) 126 | } 127 | } 128 | 
-------------------------------------------------------------------------------- /switchfs/xci.go: -------------------------------------------------------------------------------- 1 | package switchfs 2 | 3 | import ( 4 | "bytes" 5 | "encoding/binary" 6 | "errors" 7 | "go.uber.org/zap" 8 | "io" 9 | "strings" 10 | ) 11 | 12 | func ReadXciMetadata(filePath string) (map[string]*ContentMetaAttributes, error) { 13 | file, err := OpenFile(filePath) 14 | if err != nil { 15 | return nil, err 16 | } 17 | 18 | defer file.Close() 19 | 20 | header := make([]byte, 0x200) 21 | _, err = file.ReadAt(header, 0) 22 | if err != nil { 23 | return nil, err 24 | } 25 | 26 | if string(header[0x100:0x104]) != "HEAD" { 27 | return nil, errors.New("Invalid XCI headerBytes. Expected 'HEAD', got '" + string(header[:0x4]) + "'") 28 | } 29 | 30 | rootPartitionOffset := binary.LittleEndian.Uint64(header[0x130:0x138]) 31 | //rootPartitionSize := binary.LittleEndian.Uint64(header[0x138:0x140]) 32 | 33 | rootHfs0, err := readPfs0(file, int64(rootPartitionOffset)) 34 | if err != nil { 35 | return nil, err 36 | } 37 | 38 | secureHfs0, secureOffset, err := readSecurePartition(file, rootHfs0, rootPartitionOffset) 39 | if err != nil { 40 | return nil, err 41 | } 42 | 43 | contentMap := map[string]*ContentMetaAttributes{} 44 | 45 | for _, pfs0File := range secureHfs0.Files { 46 | 47 | fileOffset := secureOffset + int64(pfs0File.StartOffset) 48 | 49 | if strings.Contains(pfs0File.Name, "cnmt.nca") { 50 | _, section, err := openMetaNcaDataSection(file, fileOffset) 51 | if err != nil { 52 | return nil, err 53 | } 54 | currPfs0, err := readPfs0(bytes.NewReader(section), 0x0) 55 | if err != nil { 56 | return nil, err 57 | } 58 | currCnmt, err := readBinaryCnmt(currPfs0, section) 59 | if err != nil { 60 | return nil, err 61 | } 62 | 63 | if currCnmt.Type == "BASE" || currCnmt.Type == "UPD" { 64 | nacp, err := ExtractNacp(currCnmt, file, secureHfs0, secureOffset) 65 | if err != nil { 66 | zap.S().Debug("Failed 
to extract nacp [%v]\n", err.Error()) 67 | } 68 | currCnmt.Ncap = nacp 69 | } 70 | 71 | contentMap[currCnmt.TitleId] = currCnmt 72 | 73 | } /* else if strings.Contains(pfs0File.Name, ".cnmt.xml") { 74 | xmlBytes := make([]byte, pfs0File.Size) 75 | _, err = file.ReadAt(xmlBytes, fileOffset) 76 | if err != nil { 77 | return nil, err 78 | } 79 | 80 | currCnmt, err := readXmlCnmt(xmlBytes) 81 | if err != nil { 82 | return nil, err 83 | } 84 | contentMap[currCnmt.TitleId] = currCnmt 85 | }*/ 86 | } 87 | 88 | return contentMap, nil 89 | } 90 | 91 | func getNcaById(hfs0 *PFS0, id string) *fileEntry { 92 | for _, fileEntry := range hfs0.Files { 93 | if strings.Contains(fileEntry.Name, id) { 94 | return &fileEntry 95 | } 96 | } 97 | return nil 98 | } 99 | 100 | func readSecurePartition(file io.ReaderAt, hfs0 *PFS0, rootPartitionOffset uint64) (*PFS0, int64, error) { 101 | for _, hfs0File := range hfs0.Files { 102 | offset := int64(rootPartitionOffset) + int64(hfs0File.StartOffset) 103 | 104 | if hfs0File.Name == "secure" { 105 | securePartition, err := readPfs0(file, offset) 106 | if err != nil { 107 | return nil, 0, err 108 | } 109 | return securePartition, offset, nil 110 | } 111 | } 112 | return nil, 0, nil 113 | } 114 | -------------------------------------------------------------------------------- /web/api.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "encoding/json" 5 | "github.com/dtrunk90/switch-library-manager-web/db" 6 | "github.com/gorilla/mux" 7 | "net/http" 8 | "path/filepath" 9 | "strconv" 10 | "strings" 11 | ) 12 | 13 | type ApiFileInfo struct { 14 | DownloadUrl string `json:"downloadUrl,omitempty"` 15 | Size int64 `json:"size,omitempty"` 16 | Type string `json:"type,omitempty"` 17 | } 18 | 19 | type ApiSystemInfo struct { 20 | RequiredSystemVersion int `json:"requiredSystemVersion,omitempty"` 21 | } 22 | 23 | type ApiExtendedFileInfo struct { 24 | ApiFileInfo 25 | DisplayVersion 
string `json:"displayVersion,omitempty"` 26 | Version int `json:"version"` 27 | } 28 | 29 | type ApiUpdateItem struct { 30 | ApiExtendedFileInfo 31 | ApiSystemInfo 32 | } 33 | 34 | type ApiDlcItem struct { 35 | ApiExtendedFileInfo 36 | Name string `json:"name"` 37 | RequiredApplicationVersion int `json:"requiredApplicationVersion,omitempty"` 38 | } 39 | 40 | type ApiTitleItem struct { 41 | ApiFileInfo 42 | ApiSystemInfo 43 | BannerUrl string `json:"bannerUrl,omitempty"` 44 | IconUrl string `json:"iconUrl,omitempty"` 45 | ThumbnailUrl string `json:"thumbnailUrl,omitempty"` 46 | LatestUpdate ApiUpdateItem `json:"latestUpdate"` 47 | Name map[string]string `json:"name"` 48 | Region string `json:"region,omitempty"` 49 | Dlc map[string]ApiDlcItem `json:"dlc,omitempty"` 50 | } 51 | 52 | func (web *Web) HandleApi() { 53 | web.router.HandleFunc("/api/titles", func(w http.ResponseWriter, r *http.Request) { 54 | w.Header().Set("Content-Type", "application/json") 55 | 56 | items := map[string]ApiTitleItem{} 57 | 58 | if web.state.localDB != nil { 59 | for k, v := range web.state.localDB.TitlesMap { 60 | if v.BaseExist { 61 | titleId := strings.ToUpper(v.File.Metadata.TitleId) 62 | latestUpdate := ApiUpdateItem{ ApiExtendedFileInfo: ApiExtendedFileInfo { Version: v.LatestUpdate } } 63 | name := map[string]string{} 64 | 65 | if v.File.Metadata.Ncap != nil { 66 | latestUpdate.DisplayVersion = v.File.Metadata.Ncap.DisplayVersion 67 | } 68 | 69 | if v.Updates != nil && len(v.Updates) != 0 { 70 | latestUpdate.DownloadUrl = "/api/titles/" + titleId + "/updates/" + strconv.Itoa(v.LatestUpdate) 71 | latestUpdate.Size = v.Updates[v.LatestUpdate].ExtendedInfo.Size 72 | latestUpdate.Type = strings.ToUpper(filepath.Ext(v.Updates[v.LatestUpdate].ExtendedInfo.FileName)[1:]) 73 | latestUpdate.RequiredSystemVersion = v.Updates[v.LatestUpdate].Metadata.RequiredTitleVersion 74 | 75 | if v.Updates[v.LatestUpdate].Metadata.Ncap != nil { 76 | latestUpdate.DisplayVersion = 
v.Updates[v.LatestUpdate].Metadata.Ncap.DisplayVersion 77 | } 78 | } 79 | 80 | if v.File.Metadata.Ncap != nil { 81 | for _, langV := range v.File.Metadata.Ncap.TitleName { 82 | if langV.Title != "" { 83 | name[langV.Language.ToLanguageTag()] = langV.Title 84 | } 85 | } 86 | } 87 | 88 | if len(name) == 0 { 89 | name["unknown"] = db.ParseTitleNameFromFileName(v.File.ExtendedInfo.FileName) 90 | } 91 | 92 | items[titleId] = ApiTitleItem { 93 | ApiFileInfo: ApiFileInfo { 94 | DownloadUrl: "/api/titles/" + titleId, 95 | Size: v.File.ExtendedInfo.Size, 96 | Type: strings.ToUpper(filepath.Ext(v.File.ExtendedInfo.FileName)[1:]), 97 | }, 98 | ApiSystemInfo: ApiSystemInfo { 99 | RequiredSystemVersion: v.File.Metadata.RequiredTitleVersion, 100 | }, 101 | LatestUpdate: latestUpdate, 102 | Name: name, 103 | } 104 | 105 | if title, ok1 := web.state.switchDB.TitlesMap[k]; ok1 { 106 | if item, ok2 := items[titleId]; ok2 { 107 | item.Region = title.Attributes.Region 108 | items[titleId] = item 109 | } 110 | } 111 | 112 | if item, ok1 := items[titleId]; ok1 { 113 | if v.Banner != "" { 114 | item.BannerUrl = "/i/" + v.Banner 115 | } 116 | 117 | if v.Icon != "" { 118 | item.IconUrl = "/i/" + v.Icon 119 | } 120 | 121 | if item.IconUrl != "" { 122 | item.ThumbnailUrl = item.IconUrl + "?width=90" 123 | } else if item.BannerUrl != "" { 124 | item.ThumbnailUrl = item.BannerUrl + "?width=90" 125 | } 126 | 127 | item.Dlc = map[string]ApiDlcItem{} 128 | 129 | for id, dlc := range v.Dlc { 130 | dlcTitleId := strings.ToUpper(id) 131 | 132 | item.Dlc[dlcTitleId] = ApiDlcItem { 133 | ApiExtendedFileInfo: ApiExtendedFileInfo { 134 | ApiFileInfo: ApiFileInfo { 135 | DownloadUrl: "/api/titles/" + titleId + "/dlc/" + dlcTitleId, 136 | Size: dlc.ExtendedInfo.Size, 137 | Type: strings.ToUpper(filepath.Ext(dlc.ExtendedInfo.FileName)[1:]), 138 | }, 139 | Version: dlc.Metadata.Version, 140 | }, 141 | RequiredApplicationVersion: dlc.Metadata.RequiredTitleVersion, 142 | } 143 | 144 | if entry, ok2 := 
web.state.switchDB.TitlesMap[k].Dlc[id]; ok2 { 145 | if dlcItem, ok3 := item.Dlc[dlcTitleId]; ok3 { 146 | dlcItem.Name = entry.Name 147 | item.Dlc[dlcTitleId] = dlcItem 148 | } 149 | } 150 | 151 | if entry, ok2 := item.Dlc[dlcTitleId]; ok2 { 152 | if dlc.Metadata.Ncap != nil { 153 | entry.DisplayVersion = dlc.Metadata.Ncap.DisplayVersion 154 | } 155 | 156 | item.Dlc[dlcTitleId] = entry 157 | } 158 | } 159 | 160 | items[titleId] = item 161 | } 162 | } 163 | } 164 | } 165 | 166 | json.NewEncoder(w).Encode(items) 167 | }) 168 | 169 | web.router.HandleFunc("/api/titles/{titleId}", func(w http.ResponseWriter, r *http.Request) { 170 | if web.state.localDB != nil { 171 | vars := mux.Vars(r) 172 | 173 | for _, v := range web.state.localDB.TitlesMap { 174 | if v.BaseExist && strings.ToUpper(v.File.Metadata.TitleId) == strings.ToUpper(vars["titleId"]) { 175 | w.Header().Set("Content-Type", "application/octet-stream") 176 | w.Header().Set("Content-Disposition", "attachment; filename=" + strconv.Quote(v.File.ExtendedInfo.FileName)) 177 | http.ServeFile(w, r, filepath.Join(v.File.ExtendedInfo.BaseFolder, v.File.ExtendedInfo.FileName)) 178 | return 179 | } 180 | } 181 | } 182 | 183 | w.WriteHeader(http.StatusNotFound) 184 | }) 185 | 186 | web.router.HandleFunc("/api/titles/{titleId}/updates/{version}", func(w http.ResponseWriter, r *http.Request) { 187 | if web.state.localDB != nil { 188 | vars := mux.Vars(r) 189 | 190 | if version, err := strconv.Atoi(vars["version"]); err == nil { 191 | for _, v := range web.state.localDB.TitlesMap { 192 | if v.BaseExist && strings.ToUpper(v.File.Metadata.TitleId) == strings.ToUpper(vars["titleId"]) { 193 | if v.Updates != nil && len(v.Updates) != 0 { 194 | if update, ok := v.Updates[version]; ok { 195 | w.Header().Set("Content-Type", "application/octet-stream") 196 | w.Header().Set("Content-Disposition", "attachment; filename=" + strconv.Quote(update.ExtendedInfo.FileName)) 197 | http.ServeFile(w, r, 
filepath.Join(update.ExtendedInfo.BaseFolder, update.ExtendedInfo.FileName)) 198 | return 199 | } 200 | } 201 | 202 | break 203 | } 204 | } 205 | } 206 | } 207 | 208 | w.WriteHeader(http.StatusNotFound) 209 | }) 210 | 211 | web.router.HandleFunc("/api/titles/{titleId}/dlc/{dlcTitleId}", func(w http.ResponseWriter, r *http.Request) { 212 | if web.state.localDB != nil { 213 | vars := mux.Vars(r) 214 | 215 | for _, v := range web.state.localDB.TitlesMap { 216 | if v.BaseExist && strings.ToUpper(v.File.Metadata.TitleId) == strings.ToUpper(vars["titleId"]) { 217 | if v.Dlc != nil && len(v.Dlc) != 0 { 218 | if dlc, ok := v.Dlc[strings.ToUpper(vars["dlcTitleId"])]; ok { 219 | w.Header().Set("Content-Type", "application/octet-stream") 220 | w.Header().Set("Content-Disposition", "attachment; filename=" + strconv.Quote(dlc.ExtendedInfo.FileName)) 221 | http.ServeFile(w, r, filepath.Join(dlc.ExtendedInfo.BaseFolder, dlc.ExtendedInfo.FileName)) 222 | return 223 | } 224 | } 225 | 226 | break 227 | } 228 | } 229 | } 230 | 231 | w.WriteHeader(http.StatusNotFound) 232 | }) 233 | } 234 | -------------------------------------------------------------------------------- /web/dlc.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "github.com/dtrunk90/switch-library-manager-web/pagination" 5 | "github.com/dtrunk90/switch-library-manager-web/process" 6 | "github.com/dtrunk90/switch-library-manager-web/settings" 7 | "strings" 8 | ) 9 | 10 | func (web *Web) HandleDLC() { 11 | fsPatterns := []string { 12 | "resources/layout.html", 13 | "resources/partials/filter.html", 14 | "resources/partials/pagination.html", 15 | "resources/pages/dlc.html", 16 | } 17 | 18 | web.HandleFiltered("/dlc.html", func(filter *TitleItemFilter) any { 19 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 20 | globalPageData.Page = "dlc" 21 | items, p := web.getMissingDLC(filter) 22 | return TitleItemsPageData { 23 | 
GlobalPageData: globalPageData, 24 | TitleItems: items, 25 | Filter: filter, 26 | Pagination: p, 27 | } 28 | }, web.embedFS, fsPatterns...) 29 | } 30 | 31 | func (web *Web) getMissingDLC(filter *TitleItemFilter) ([]TitleItem, pagination.Pagination) { 32 | items := []TitleItem{} 33 | 34 | if web.state.localDB == nil { 35 | return items, pagination.Calculate(filter.Page, filter.PerPage, 0) 36 | } 37 | 38 | settingsObj := settings.ReadSettings(web.dataFolder) 39 | ignoreIds := map[string]struct{}{} 40 | 41 | for _, id := range settingsObj.IgnoreDLCTitleIds { 42 | ignoreIds[strings.ToLower(id)] = struct{}{} 43 | } 44 | 45 | missingDLC := process.ScanForMissingDLC(web.state.localDB.TitlesMap, web.state.switchDB.TitlesMap, ignoreIds) 46 | 47 | for _, v := range missingDLC { 48 | if filter.Keyword == "" || strings.Contains(strings.ToLower(v.Attributes.Id), strings.ToLower(filter.Keyword)) || strings.Contains(strings.ToLower(v.Attributes.Name), strings.ToLower(filter.Keyword)) { 49 | var imageUrl string 50 | if v.Attributes.IconUrl != "" { 51 | imageUrl = v.Attributes.IconUrl 52 | } else if v.Attributes.BannerUrl != "" { 53 | imageUrl = v.Attributes.BannerUrl 54 | } 55 | 56 | items = append(items, TitleItem { 57 | ImageUrl: imageUrl, 58 | Id: strings.ToUpper(v.Attributes.Id), 59 | MissingDLC: v.MissingDLC, 60 | Name: v.Attributes.Name, 61 | Region: v.Attributes.Region, 62 | }) 63 | } 64 | } 65 | 66 | p := pagination.Calculate(filter.Page, filter.PerPage, len(items)) 67 | 68 | if err := sortItems(filter, items); err != nil { 69 | web.sugarLogger.Error(err) 70 | } 71 | 72 | return items[p.Start:p.End], p 73 | } 74 | -------------------------------------------------------------------------------- /web/filter.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "errors" 5 | "sort" 6 | ) 7 | 8 | type TitleItemFilter struct { 9 | Keyword string `in:"form=q"` 10 | PerPage int `in:"form=per_page;default=24"` 11 | 
SortBy string `in:"form=sort_by;default=name"` 12 | SortOrder string `in:"form=sort_order;default=asc"` 13 | Page int `in:"form=page;default=1"` 14 | } 15 | 16 | type TitleItemById []TitleItem 17 | type TitleItemByLatestUpdateDate []TitleItem 18 | type TitleItemByMissingLen []TitleItem 19 | type TitleItemByName []TitleItem 20 | type TitleItemByRegion []TitleItem 21 | type TitleItemByReleaseDate []TitleItem 22 | type TitleItemByType []TitleItem 23 | 24 | func (a TitleItemById) Len() int { return len(a) } 25 | func (a TitleItemById) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 26 | func (a TitleItemById) Less(i, j int) bool { 27 | return a[i].Id < a[j].Id 28 | } 29 | 30 | func (a TitleItemByLatestUpdateDate) Len() int { return len(a) } 31 | func (a TitleItemByLatestUpdateDate) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 32 | func (a TitleItemByLatestUpdateDate) Less(i, j int) bool { 33 | if a[i].LatestUpdateDate == a[j].LatestUpdateDate { 34 | return a[i].Name < a[j].Name 35 | } 36 | 37 | return a[i].LatestUpdateDate.Before(a[j].LatestUpdateDate) 38 | } 39 | 40 | func (a TitleItemByMissingLen) Len() int { return len(a) } 41 | func (a TitleItemByMissingLen) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 42 | func (a TitleItemByMissingLen) Less(i, j int) bool { 43 | if len(a[i].MissingDLC) == len(a[j].MissingDLC) { 44 | return a[i].Name < a[j].Name 45 | } 46 | 47 | return len(a[i].MissingDLC) < len(a[j].MissingDLC) 48 | } 49 | 50 | func (a TitleItemByName) Len() int { return len(a) } 51 | func (a TitleItemByName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 52 | func (a TitleItemByName) Less(i, j int) bool { 53 | return a[i].Name < a[j].Name 54 | } 55 | 56 | func (a TitleItemByRegion) Len() int { return len(a) } 57 | func (a TitleItemByRegion) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 58 | func (a TitleItemByRegion) Less(i, j int) bool { 59 | if a[i].Region == a[j].Region { 60 | return a[i].Name < a[j].Name 61 | } 62 | 63 | return a[i].Region < a[j].Region 64 | } 65 | 66 | 
func (a TitleItemByReleaseDate) Len() int { return len(a) } 67 | func (a TitleItemByReleaseDate) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 68 | func (a TitleItemByReleaseDate) Less(i, j int) bool { 69 | if a[i].ReleaseDate == a[j].ReleaseDate { 70 | return a[i].Name < a[j].Name 71 | } 72 | 73 | return a[i].ReleaseDate.Before(a[j].ReleaseDate) 74 | } 75 | 76 | func (a TitleItemByType) Len() int { return len(a) } 77 | func (a TitleItemByType) Swap(i, j int) { a[i], a[j] = a[j], a[i] } 78 | func (a TitleItemByType) Less(i, j int) bool { 79 | if a[i].Type == a[j].Type { 80 | return a[i].Name < a[j].Name 81 | } 82 | 83 | return a[i].Type < a[j].Type 84 | } 85 | 86 | func sortItems(filter *TitleItemFilter, items []TitleItem) error { 87 | var data sort.Interface 88 | 89 | switch filter.SortBy { 90 | case "id": 91 | data = TitleItemById(items) 92 | case "latest_update_date": 93 | data = TitleItemByLatestUpdateDate(items) 94 | case "missing": 95 | data = TitleItemByMissingLen(items) 96 | case "name": 97 | data = TitleItemByName(items) 98 | case "region": 99 | data = TitleItemByRegion(items) 100 | case "release_date": 101 | data = TitleItemByReleaseDate(items) 102 | case "type": 103 | data = TitleItemByType(items) 104 | default: 105 | return errors.New("Unknown value for parameter sort_by") 106 | } 107 | 108 | if filter.SortOrder == "desc" { 109 | data = sort.Reverse(data) 110 | } else if filter.SortOrder != "asc" { 111 | return errors.New("Unknown value for parameter sort_order") 112 | } 113 | 114 | sort.Sort(data) 115 | 116 | return nil 117 | } 118 | -------------------------------------------------------------------------------- /web/http.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "github.com/ggicci/httpin" 7 | "github.com/justinas/alice" 8 | "html/template" 9 | "io/fs" 10 | "log" 11 | "net/http" 12 | ) 13 | 14 | type ErrorResponse struct { 15 | GlobalError 
GlobalError `json:"globalError"` 16 | FieldErrors []FieldError `json:"fieldErrors"` 17 | } 18 | 19 | type GlobalError struct { 20 | StrongMessage string `json:"strongMessage"` 21 | Message string `json:"message"` 22 | } 23 | 24 | type FieldError struct { 25 | Field string `json:"field"` 26 | Message string `json:"message"` 27 | } 28 | 29 | type SuccessResponse struct { 30 | StrongMessage string `json:"strongMessage"` 31 | Message string `json:"message"` 32 | } 33 | 34 | type FilteredPageData func(filter *TitleItemFilter) any 35 | type Validate func(value any) ErrorResponse 36 | type OnSuccess func(value any) SuccessResponse 37 | type PageData func() any 38 | 39 | func hasErrors(errorResponse ErrorResponse) bool { 40 | return errorResponse.GlobalError != GlobalError{} || len(errorResponse.FieldErrors) > 0 41 | } 42 | 43 | func (web *Web) HandleFiltered(pattern string, filteredPageData FilteredPageData, fs fs.FS, fsPatterns ...string) { 44 | tmpl := template.New("layout").Funcs(funcMap) 45 | tmpl, err := tmpl.ParseFS(fs, fsPatterns...) 46 | 47 | if err != nil { 48 | web.sugarLogger.Error(fmt.Errorf("parsing template failed: %w", err)) 49 | log.Fatal(err) 50 | } 51 | 52 | web.router.Handle(pattern, alice.New(httpin.NewInput(TitleItemFilter{})).ThenFunc(func(w http.ResponseWriter, r *http.Request) { 53 | filter := r.Context().Value(httpin.Input).(*TitleItemFilter) 54 | if err := tmpl.ExecuteTemplate(w, "layout", filteredPageData(filter)); err != nil { 55 | web.sugarLogger.Error(fmt.Errorf("executing template failed: %w", err)) 56 | w.WriteHeader(http.StatusInternalServerError) 57 | } 58 | })) 59 | } 60 | 61 | func (web *Web) HandleValidated(pattern string, inputStruct interface{}, pageData PageData, validate Validate, onSuccess OnSuccess, fs fs.FS, fsPatterns ...string) { 62 | tmpl := template.New("layout").Funcs(funcMap) 63 | tmpl, err := tmpl.ParseFS(fs, fsPatterns...) 
64 | 65 | if err != nil { 66 | web.sugarLogger.Error(fmt.Errorf("parsing template failed: %w", err)) 67 | log.Fatal(err) 68 | } 69 | 70 | web.router.Handle(pattern, alice.New(httpin.NewInput(inputStruct)).ThenFunc(func(w http.ResponseWriter, r *http.Request) { 71 | switch r.Method { 72 | case "GET": 73 | if err := tmpl.ExecuteTemplate(w, "layout", pageData()); err != nil { 74 | web.sugarLogger.Error(fmt.Errorf("executing template failed: %w", err)) 75 | w.WriteHeader(http.StatusInternalServerError) 76 | } 77 | case "POST": 78 | w.Header().Set("Content-Type", "application/json") 79 | 80 | value := r.Context().Value(httpin.Input) 81 | jsonEncoder := json.NewEncoder(w) 82 | 83 | if errorResponse := validate(value); hasErrors(errorResponse) { 84 | w.WriteHeader(http.StatusBadRequest) 85 | jsonEncoder.Encode(errorResponse) 86 | return 87 | } 88 | 89 | jsonEncoder.Encode(onSuccess(value)) 90 | default: 91 | web.sugarLogger.Error(fmt.Errorf("Unsupported method: %s", r.Method)) 92 | w.WriteHeader(http.StatusMethodNotAllowed) 93 | } 94 | })) 95 | } 96 | 97 | func (web *Web) Handle(pattern string, pageData PageData, fs fs.FS, fsPatterns ...string) { 98 | tmpl := template.New("layout").Funcs(funcMap) 99 | tmpl, err := tmpl.ParseFS(fs, fsPatterns...) 
100 | 101 | if err != nil { 102 | web.sugarLogger.Error(fmt.Errorf("parsing template failed: %w", err)) 103 | log.Fatal(err) 104 | } 105 | 106 | web.router.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) { 107 | if err := tmpl.ExecuteTemplate(w, "layout", pageData()); err != nil { 108 | web.sugarLogger.Error(fmt.Errorf("executing template failed: %w", err)) 109 | w.WriteHeader(http.StatusInternalServerError) 110 | } 111 | }) 112 | } 113 | -------------------------------------------------------------------------------- /web/img.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "github.com/pierrre/imageserver" 5 | imageserver_http "github.com/pierrre/imageserver/http" 6 | imageserver_http_gift "github.com/pierrre/imageserver/http/gift" 7 | imageserver_image "github.com/pierrre/imageserver/image" 8 | _ "github.com/pierrre/imageserver/image/gif" 9 | imageserver_image_gift "github.com/pierrre/imageserver/image/gift" 10 | imageserver_source "github.com/pierrre/imageserver/source" 11 | _ "github.com/pierrre/imageserver/image/jpeg" 12 | _ "github.com/pierrre/imageserver/image/png" 13 | "io/ioutil" 14 | "net/http" 15 | "path/filepath" 16 | "strings" 17 | ) 18 | 19 | func (web *Web) HandleImages() { 20 | http.Handle("/i/", &imageserver_http.Handler { 21 | Parser: imageserver_http.ListParser([]imageserver_http.Parser { 22 | &imageserver_http.SourceTransformParser { 23 | Parser: &imageserver_http.SourcePathParser{}, 24 | Transform: func(source string) string { 25 | return strings.TrimPrefix(source, "/i/") 26 | }, 27 | }, 28 | &imageserver_http_gift.ResizeParser{}, 29 | }), 30 | Server: &imageserver.HandlerServer { 31 | Server: imageserver.Server(imageserver.ServerFunc(func(params imageserver.Params) (*imageserver.Image, error) { 32 | source, err := params.GetString(imageserver_source.Param) 33 | if err != nil { 34 | return nil, err 35 | } 36 | im, err := web.GetImg(source) 37 | if err != 
nil { 38 | return nil, &imageserver.ParamError{Param: imageserver_source.Param, Message: err.Error()} 39 | } 40 | return im, nil 41 | })), 42 | Handler: &imageserver_image.Handler { 43 | Processor: &imageserver_image_gift.ResizeProcessor{}, 44 | }, 45 | }, 46 | }) 47 | } 48 | 49 | func (web *Web) GetImg(name string) (*imageserver.Image, error) { 50 | filePath := filepath.Join(web.dataFolder, "img", name) 51 | data, err := ioutil.ReadFile(filePath) 52 | 53 | if err != nil { 54 | return nil, err 55 | } 56 | 57 | im := &imageserver.Image { 58 | Format: "jpeg", 59 | Data: data, 60 | } 61 | 62 | return im, nil 63 | } 64 | -------------------------------------------------------------------------------- /web/index.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "fmt" 5 | "github.com/dtrunk90/switch-library-manager-web/db" 6 | "github.com/dtrunk90/switch-library-manager-web/pagination" 7 | "github.com/dtrunk90/switch-library-manager-web/settings" 8 | "path/filepath" 9 | "strings" 10 | ) 11 | 12 | func getType(gameFile *db.SwitchGameFiles) string { 13 | if gameFile.IsSplit { 14 | return "split" 15 | } 16 | 17 | if gameFile.MultiContent { 18 | return "multi-content" 19 | } 20 | 21 | ext := filepath.Ext(gameFile.File.ExtendedInfo.FileName) 22 | if len(ext) > 1 { 23 | return ext[1:] 24 | } 25 | 26 | return "" 27 | } 28 | 29 | func (web *Web) HandleIndex() { 30 | fsPatterns := []string { 31 | "resources/layout.html", 32 | "resources/partials/card.html", 33 | "resources/partials/filter.html", 34 | "resources/partials/pagination.html", 35 | "resources/pages/index.html", 36 | } 37 | 38 | web.HandleFiltered("/index.html", func(filter *TitleItemFilter) any { 39 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 40 | globalPageData.Page = "index" 41 | items, p := web.getLibrary(filter) 42 | return TitleItemsPageData { 43 | GlobalPageData: globalPageData, 44 | TitleItems: items, 45 | Filter: 
filter, 46 | Pagination: p, 47 | } 48 | }, web.embedFS, fsPatterns...) 49 | } 50 | 51 | func (web *Web) getLibrary(filter *TitleItemFilter) ([]TitleItem, pagination.Pagination) { 52 | items := []TitleItem{} 53 | 54 | if web.state.localDB == nil { 55 | return items, pagination.Calculate(filter.Page, filter.PerPage, 0) 56 | } 57 | 58 | for k, v := range web.state.localDB.TitlesMap { 59 | if v.BaseExist { 60 | version := "" 61 | name := "" 62 | if v.File.Metadata.Ncap != nil { 63 | version = v.File.Metadata.Ncap.DisplayVersion 64 | name = v.File.Metadata.Ncap.TitleName["AmericanEnglish"].Title 65 | } 66 | 67 | if v.Updates != nil && len(v.Updates) != 0 { 68 | if v.Updates[v.LatestUpdate].Metadata.Ncap != nil { 69 | version = v.Updates[v.LatestUpdate].Metadata.Ncap.DisplayVersion 70 | } else { 71 | version = "" 72 | } 73 | } 74 | 75 | if title, ok := web.state.switchDB.TitlesMap[k]; ok { 76 | if title.Attributes.Name != "" { 77 | name = title.Attributes.Name 78 | } 79 | 80 | if filter.Keyword == "" || strings.Contains(strings.ToLower(v.File.Metadata.TitleId), strings.ToLower(filter.Keyword)) || strings.Contains(strings.ToLower(name), strings.ToLower(filter.Keyword)) { 81 | var imageUrl string 82 | if v.Icon != "" { 83 | imageUrl = "/i/" + v.Icon 84 | } else if v.Banner != "" { 85 | imageUrl = "/i/" + v.Banner 86 | } 87 | 88 | release, err := intToTime(title.Attributes.ReleaseDate) 89 | if err != nil { 90 | web.sugarLogger.Error(fmt.Errorf("parsing time failed: %w", err)) 91 | } 92 | 93 | items = append(items, TitleItem { 94 | ImageUrl: imageUrl, 95 | Id: strings.ToUpper(v.File.Metadata.TitleId), 96 | LocalUpdate: v.LatestUpdate, 97 | Name: name, 98 | Region: title.Attributes.Region, 99 | ReleaseDate: release, 100 | Type: strings.ToUpper(getType(v)), 101 | Version: version, 102 | }) 103 | } 104 | } else { 105 | if name == "" { 106 | name = db.ParseTitleNameFromFileName(v.File.ExtendedInfo.FileName) 107 | } 108 | 109 | if filter.Keyword == "" || 
strings.Contains(strings.ToLower(v.File.Metadata.TitleId), strings.ToLower(filter.Keyword)) || strings.Contains(strings.ToLower(name), strings.ToLower(filter.Keyword)) { 110 | items = append(items, TitleItem { 111 | Id: strings.ToUpper(v.File.Metadata.TitleId), 112 | LocalUpdate: v.LatestUpdate, 113 | Name: name, 114 | Type: strings.ToUpper(getType(v)), 115 | Version: version, 116 | }) 117 | } 118 | } 119 | } 120 | } 121 | 122 | p := pagination.Calculate(filter.Page, filter.PerPage, len(items)) 123 | 124 | if err := sortItems(filter, items); err != nil { 125 | web.sugarLogger.Error(err) 126 | } 127 | 128 | return items[p.Start:p.End], p 129 | } 130 | -------------------------------------------------------------------------------- /web/issues.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "github.com/dtrunk90/switch-library-manager-web/settings" 5 | "path/filepath" 6 | ) 7 | 8 | type Issue struct { 9 | File string 10 | Reason string 11 | } 12 | 13 | type IssuesPageData struct { 14 | GlobalPageData 15 | Issues []Issue 16 | } 17 | 18 | func (web *Web) HandleIssues() { 19 | fsPatterns := []string { 20 | "resources/layout.html", 21 | "resources/pages/issues.html", 22 | } 23 | 24 | web.Handle("/issues.html", func() any { 25 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 26 | globalPageData.Page = "issues" 27 | issues := web.getIssues() 28 | return IssuesPageData { 29 | GlobalPageData: globalPageData, 30 | Issues: issues, 31 | } 32 | }, web.embedFS, fsPatterns...) 
33 | } 34 | 35 | func (web *Web) getIssues() []Issue { 36 | issues := []Issue{} 37 | 38 | if web.state.localDB == nil { 39 | return issues 40 | } 41 | 42 | for _, v := range web.state.localDB.TitlesMap { 43 | if !v.BaseExist { 44 | for _, update := range v.Updates { 45 | issues = append(issues, Issue{File: filepath.Join(update.ExtendedInfo.BaseFolder, update.ExtendedInfo.FileName), Reason: "base file is missing"}) 46 | } 47 | 48 | for _, dlc := range v.Dlc { 49 | issues = append(issues, Issue{File: filepath.Join(dlc.ExtendedInfo.BaseFolder, dlc.ExtendedInfo.FileName), Reason: "base file is missing"}) 50 | } 51 | } 52 | } 53 | 54 | for k, v := range web.state.localDB.Skipped { 55 | issues = append(issues, Issue{File: filepath.Join(k.BaseFolder, k.FileName), Reason: v.ReasonText}) 56 | } 57 | 58 | return issues 59 | } 60 | -------------------------------------------------------------------------------- /web/missing.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "fmt" 5 | "github.com/dtrunk90/switch-library-manager-web/pagination" 6 | "github.com/dtrunk90/switch-library-manager-web/settings" 7 | "strings" 8 | ) 9 | 10 | func (web *Web) HandleMissing() { 11 | fsPatterns := []string { 12 | "resources/layout.html", 13 | "resources/partials/card.html", 14 | "resources/partials/filter.html", 15 | "resources/partials/pagination.html", 16 | "resources/pages/missing.html", 17 | } 18 | 19 | web.HandleFiltered("/missing.html", func(filter *TitleItemFilter) any { 20 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 21 | globalPageData.Page = "missing" 22 | items, p := web.getMissingGames(filter) 23 | return TitleItemsPageData { 24 | GlobalPageData: globalPageData, 25 | TitleItems: items, 26 | Filter: filter, 27 | Pagination: p, 28 | } 29 | }, web.embedFS, fsPatterns...) 
30 | } 31 | 32 | func (web *Web) getMissingGames(filter *TitleItemFilter) ([]TitleItem, pagination.Pagination) { 33 | items := []TitleItem{} 34 | 35 | if web.state.localDB == nil { 36 | return items, pagination.Calculate(filter.Page, filter.PerPage, 0) 37 | } 38 | 39 | for k, v := range web.state.switchDB.TitlesMap { 40 | if _, ok := web.state.localDB.TitlesMap[k]; ok { 41 | continue 42 | } 43 | 44 | if v.Attributes.Name == "" || v.Attributes.Id == "" { 45 | continue 46 | } 47 | 48 | if filter.Keyword == "" || strings.Contains(strings.ToLower(v.Attributes.Id), strings.ToLower(filter.Keyword)) || strings.Contains(strings.ToLower(v.Attributes.Name), strings.ToLower(filter.Keyword)) { 49 | var imageUrl string 50 | if v.Attributes.IconUrl != "" { 51 | imageUrl = v.Attributes.IconUrl 52 | } else if v.Attributes.BannerUrl != "" { 53 | imageUrl = v.Attributes.BannerUrl 54 | } 55 | 56 | release, err := intToTime(v.Attributes.ReleaseDate) 57 | if err != nil { 58 | web.sugarLogger.Error(fmt.Errorf("parsing time failed: %w", err)) 59 | } 60 | 61 | items = append(items, TitleItem { 62 | ImageUrl: imageUrl, 63 | Id: strings.ToUpper(v.Attributes.Id), 64 | Name: v.Attributes.Name, 65 | Region: v.Attributes.Region, 66 | ReleaseDate: release, 67 | }) 68 | } 69 | } 70 | 71 | p := pagination.Calculate(filter.Page, filter.PerPage, len(items)) 72 | 73 | if err := sortItems(filter, items); err != nil { 74 | web.sugarLogger.Error(err) 75 | } 76 | 77 | return items[p.Start:p.End], p 78 | } 79 | -------------------------------------------------------------------------------- /web/organize.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | func (web *Web) HandleOrganize() { 4 | } 5 | -------------------------------------------------------------------------------- /web/resources.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "fmt" 5 | "io/fs" 6 | "log" 7 | 
"net/http" 8 | ) 9 | 10 | func (web *Web) HandleResources() { 11 | fSys, err := fs.Sub(web.embedFS, "resources/static") 12 | if err != nil { 13 | web.sugarLogger.Error(fmt.Errorf("getting static files failed: %w", err)) 14 | log.Fatal(err) 15 | } 16 | http.Handle("/resources/static/", http.StripPrefix("/resources/static/", http.FileServer(http.FS(fSys)))) 17 | 18 | fSys, err = fs.Sub(web.embedFS, "node_modules") 19 | if err != nil { 20 | web.sugarLogger.Error(fmt.Errorf("getting vendor files failed: %w", err)) 21 | log.Fatal(err) 22 | } 23 | http.Handle("/resources/vendor/", http.StripPrefix("/resources/vendor/", http.FileServer(http.FS(fSys)))) 24 | } 25 | -------------------------------------------------------------------------------- /web/settings.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "github.com/dtrunk90/switch-library-manager-web/settings" 5 | "os" 6 | "regexp" 7 | "strings" 8 | ) 9 | 10 | type SettingsPageData struct { 11 | GlobalPageData 12 | Settings *settings.AppSettings 13 | } 14 | 15 | type SettingsForm struct { 16 | Prodkeys string `in:"form=prod_keys"` 17 | ScanFolders string `in:"form=scan_folders"` 18 | IgnoreDLCTitleIds string `in:"form=ignore_dlc_title_ids"` 19 | } 20 | 21 | func SplitAndTrimSpaceArray(s string, sep string) []string { 22 | arr := []string{} 23 | 24 | for _, v := range strings.Split(s, sep) { 25 | if value := strings.TrimSpace(v); value != "" { 26 | arr = append(arr, value) 27 | } 28 | } 29 | 30 | return arr 31 | } 32 | 33 | func (web *Web) HandleSettings() { 34 | fsPatterns := []string { 35 | "resources/layout.html", 36 | "resources/pages/settings.html", 37 | } 38 | 39 | web.HandleValidated("/settings.html", SettingsForm{}, func() any { 40 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 41 | globalPageData.Page = "settings" 42 | return SettingsPageData { 43 | GlobalPageData: globalPageData, 44 | Settings: web.appSettings, 45 | 
} 46 | }, func(value any) ErrorResponse { 47 | settingsForm := value.(*SettingsForm) 48 | errorResponse := ErrorResponse{ 49 | FieldErrors: []FieldError{}, 50 | } 51 | 52 | if strings.TrimSpace(settingsForm.Prodkeys) != "" { 53 | keys, err := settings.GetSwitchKeys(settingsForm.Prodkeys) 54 | if err != nil { 55 | errorResponse.FieldErrors = append(errorResponse.FieldErrors, FieldError { 56 | Field: "prod_keys", 57 | Message: "Error trying to read Product Keys (" + err.Error() + ")", 58 | }) 59 | } else if keys["header_key"] == "" { 60 | errorResponse.FieldErrors = append(errorResponse.FieldErrors, FieldError { 61 | Field: "prod_keys", 62 | Message: "Please provide a valid Product Keys Path", 63 | }) 64 | } 65 | } 66 | 67 | scanFolders := SplitAndTrimSpaceArray(settingsForm.ScanFolders, "\n") 68 | 69 | if len(scanFolders) == 0 { 70 | errorResponse.FieldErrors = append(errorResponse.FieldErrors, FieldError { 71 | Field: "scan_folders", 72 | Message: "Please provide at least one Folder to scan", 73 | }) 74 | } else { 75 | for _, value := range scanFolders { 76 | if _, err := os.Stat(value); os.IsNotExist(err) || os.IsPermission(err) { 77 | errorResponse.FieldErrors = append(errorResponse.FieldErrors, FieldError { 78 | Field: "scan_folders", 79 | Message: "Folder not found: " + value, 80 | }) 81 | 82 | break 83 | } 84 | } 85 | } 86 | 87 | r, _ := regexp.Compile("^[0-9A-Fa-f]+$") 88 | 89 | for _, value := range SplitAndTrimSpaceArray(settingsForm.IgnoreDLCTitleIds, "\n") { 90 | if !r.MatchString(value) { 91 | errorResponse.FieldErrors = append(errorResponse.FieldErrors, FieldError { 92 | Field: "ignore_dlc_title_ids", 93 | Message: "Invalid Title ID: " + value, 94 | }) 95 | 96 | break 97 | } 98 | } 99 | 100 | return errorResponse 101 | }, func(value any) SuccessResponse { 102 | settingsForm := value.(*SettingsForm) 103 | scanFolders := SplitAndTrimSpaceArray(settingsForm.ScanFolders, "\n") 104 | 105 | appSettings := settings.ReadSettings(web.dataFolder) 106 | 
appSettings.Prodkeys = settingsForm.Prodkeys 107 | appSettings.IgnoreDLCTitleIds = SplitAndTrimSpaceArray(settingsForm.IgnoreDLCTitleIds, "\n") 108 | appSettings.Folder = scanFolders[0] 109 | if len(scanFolders) > 1 { 110 | appSettings.ScanFolders = scanFolders[1:] 111 | } else { 112 | appSettings.ScanFolders = []string{} 113 | } 114 | 115 | settings.SaveSettings(appSettings, web.dataFolder) 116 | web.appSettings = appSettings 117 | 118 | settings.InitSwitchKeys(web.dataFolder) 119 | web.buildLocalDB(web.localDbManager, true) 120 | 121 | return SuccessResponse { 122 | StrongMessage: "Success!", 123 | Message: "Settings changed successfully.", 124 | } 125 | }, web.embedFS, fsPatterns...) 126 | } 127 | -------------------------------------------------------------------------------- /web/synchronize.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "encoding/json" 5 | "net/http" 6 | ) 7 | 8 | func (web *Web) Synchronize() { 9 | web.state.IsSynchronizing = true 10 | 11 | go func () { 12 | web.state.switchDB = nil 13 | 14 | web.updateDB() 15 | 16 | if _, err := web.buildLocalDB(web.localDbManager, true); err != nil { 17 | web.sugarLogger.Error(err) 18 | } 19 | 20 | web.state.IsSynchronizing = false 21 | }() 22 | } 23 | 24 | func (web *Web) HandleSynchronize() { 25 | web.router.HandleFunc("/sync", func(w http.ResponseWriter, r *http.Request) { 26 | web.Synchronize() 27 | }).Methods("POST") 28 | 29 | web.router.HandleFunc("/sync", func(w http.ResponseWriter, r *http.Request) { 30 | w.Header().Set("Content-Type", "application/json") 31 | jsonEncoder := json.NewEncoder(w) 32 | jsonEncoder.Encode(web.state.IsSynchronizing) 33 | }).Methods("GET") 34 | } 35 | -------------------------------------------------------------------------------- /web/updates.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "fmt" 5 | 
"github.com/dtrunk90/switch-library-manager-web/pagination" 6 | "github.com/dtrunk90/switch-library-manager-web/process" 7 | "github.com/dtrunk90/switch-library-manager-web/settings" 8 | "strings" 9 | ) 10 | 11 | func (web *Web) HandleUpdates() { 12 | fsPatterns := []string { 13 | "resources/layout.html", 14 | "resources/partials/card.html", 15 | "resources/partials/filter.html", 16 | "resources/partials/pagination.html", 17 | "resources/pages/updates.html", 18 | } 19 | 20 | web.HandleFiltered("/updates.html", func(filter *TitleItemFilter) any { 21 | globalPageData.IsKeysFileAvailable = settings.IsKeysFileAvailable() 22 | globalPageData.Page = "updates" 23 | items, p := web.getMissingUpdates(filter) 24 | return TitleItemsPageData { 25 | GlobalPageData: globalPageData, 26 | TitleItems: items, 27 | Filter: filter, 28 | Pagination: p, 29 | } 30 | }, web.embedFS, fsPatterns...) 31 | } 32 | 33 | func (web *Web) getMissingUpdates(filter *TitleItemFilter) ([]TitleItem, pagination.Pagination) { 34 | items := []TitleItem{} 35 | 36 | if web.state.localDB == nil { 37 | return items, pagination.Calculate(filter.Page, filter.PerPage, 0) 38 | } 39 | 40 | missingUpdates := process.ScanForMissingUpdates(web.state.localDB.TitlesMap, web.state.switchDB.TitlesMap) 41 | 42 | for _, v := range missingUpdates { 43 | if filter.Keyword == "" || strings.Contains(strings.ToLower(v.Attributes.Id), strings.ToLower(filter.Keyword)) || strings.Contains(strings.ToLower(v.Attributes.Name), strings.ToLower(filter.Keyword)) { 44 | var imageUrl string 45 | if v.Attributes.IconUrl != "" { 46 | imageUrl = v.Attributes.IconUrl 47 | } else if v.Attributes.BannerUrl != "" { 48 | imageUrl = v.Attributes.BannerUrl 49 | } 50 | 51 | latest, err := strToTime("2006-01-02", v.LatestUpdateDate) 52 | if err != nil { 53 | web.sugarLogger.Error(fmt.Errorf("parsing time failed: %w", err)) 54 | } 55 | 56 | release, err := intToTime(v.Attributes.ReleaseDate) 57 | if err != nil { 58 | 
web.sugarLogger.Error(fmt.Errorf("parsing time failed: %w", err)) 59 | } 60 | 61 | items = append(items, TitleItem { 62 | ImageUrl: imageUrl, 63 | Id: strings.ToUpper(v.Attributes.Id), 64 | LatestUpdate: v.LatestUpdate, 65 | LatestUpdateDate: latest, 66 | LocalUpdate: v.LocalUpdate, 67 | Name: v.Attributes.Name, 68 | Region: v.Attributes.Region, 69 | ReleaseDate: release, 70 | Type: strings.ToUpper(v.Meta.Type), 71 | }) 72 | } 73 | } 74 | 75 | p := pagination.Calculate(filter.Page, filter.PerPage, len(items)) 76 | 77 | if err := sortItems(filter, items); err != nil { 78 | web.sugarLogger.Error(err) 79 | } 80 | 81 | return items[p.Start:p.End], p 82 | } 83 | -------------------------------------------------------------------------------- /web/web.go: -------------------------------------------------------------------------------- 1 | package web 2 | 3 | import ( 4 | "embed" 5 | "errors" 6 | "fmt" 7 | "github.com/dtrunk90/switch-library-manager-web/db" 8 | "github.com/dtrunk90/switch-library-manager-web/pagination" 9 | "github.com/dtrunk90/switch-library-manager-web/settings" 10 | "github.com/gorilla/mux" 11 | "go.uber.org/zap" 12 | "html/template" 13 | "log" 14 | "net/http" 15 | "os" 16 | "path/filepath" 17 | "strconv" 18 | "strings" 19 | "sync" 20 | "time" 21 | ) 22 | 23 | type WebState struct { 24 | sync.Mutex 25 | switchDB *db.SwitchTitlesDB 26 | localDB *db.LocalSwitchFilesDB 27 | IsSynchronizing bool 28 | } 29 | 30 | type Web struct { 31 | state WebState 32 | router *mux.Router 33 | embedFS embed.FS 34 | appSettings *settings.AppSettings 35 | dataFolder string 36 | localDbManager *db.LocalSwitchDBManager 37 | sugarLogger *zap.SugaredLogger 38 | } 39 | 40 | type TitleItem struct { 41 | ImageUrl string 42 | Id string 43 | LatestUpdate int 44 | LatestUpdateDate time.Time 45 | LocalUpdate int 46 | MissingDLC []string 47 | Name string 48 | Region string 49 | ReleaseDate time.Time 50 | Type string 51 | Version string 52 | } 53 | 54 | type GlobalPageData struct { 55 | 
IsKeysFileAvailable bool 56 | Page string 57 | SlmVersion string 58 | Version string 59 | } 60 | 61 | type TitleItemsPageData struct { 62 | GlobalPageData 63 | TitleItems []TitleItem 64 | Filter *TitleItemFilter 65 | Pagination pagination.Pagination 66 | } 67 | 68 | var funcMap = template.FuncMap { 69 | "add": func(a, b int) int { 70 | return a + b 71 | }, 72 | "eq": func(a, b interface{}) bool { 73 | return a == b 74 | }, 75 | "gt": func(a, b int) bool { 76 | return a > b 77 | }, 78 | "lt": func(a, b int) bool { 79 | return a < b 80 | }, 81 | "mkRange": func(min, max int) []int { 82 | a := make([]int, max - min + 1) 83 | for i := range a { 84 | a[i] = min + i 85 | } 86 | return a 87 | }, 88 | "mkSlice": func(args ...interface{}) []interface{} { 89 | return args 90 | }, 91 | "neq": func(a, b interface{}) bool { 92 | return a != b 93 | }, 94 | "formatTime": func(value time.Time) string { 95 | if value.IsZero() { 96 | return "" 97 | } 98 | 99 | return value.Format("2006-01-02") 100 | }, 101 | "subtract": func(a, b int) int { 102 | return a - b 103 | }, 104 | "toLower": strings.ToLower, 105 | } 106 | 107 | var globalPageData = GlobalPageData { 108 | SlmVersion: settings.SLM_VERSION, 109 | Version: settings.SLM_WEB_VERSION, 110 | } 111 | 112 | func intToTime(value int) (time.Time, error) { 113 | if value <= 0 { 114 | return time.Time{}, nil 115 | } 116 | 117 | return strToTime("20060102", strconv.Itoa(value)) 118 | } 119 | 120 | func strToTime(layout, value string) (time.Time, error) { 121 | if value == "" { 122 | return time.Time{}, nil 123 | } 124 | 125 | return time.Parse(layout, value) 126 | } 127 | 128 | func CreateWeb(router *mux.Router, embedFS embed.FS, appSettings *settings.AppSettings, dataFolder string, sugarLogger *zap.SugaredLogger) *Web { 129 | return &Web{state: WebState{}, router: router, embedFS: embedFS, appSettings: appSettings, dataFolder: dataFolder, sugarLogger: sugarLogger} 130 | } 131 | 132 | func (web *Web) Start() { 133 | titleFilePath := 
filepath.Join(web.dataFolder, settings.TITLE_JSON_FILENAME) 134 | versionsFilePath := filepath.Join(web.dataFolder, settings.VERSIONS_JSON_FILENAME) 135 | 136 | if titleFile, err := os.Open(titleFilePath); err == nil { 137 | if versionsFile, err := os.Open(versionsFilePath); err == nil { 138 | if switchTitleDB, err := db.CreateSwitchTitleDB(titleFile, versionsFile); err == nil { 139 | web.state.switchDB = switchTitleDB 140 | } 141 | } 142 | } 143 | 144 | localDbManager, err := db.NewLocalSwitchDBManager(web.dataFolder) 145 | if err != nil { 146 | web.sugarLogger.Error("Failed to create local files db\n", err) 147 | return 148 | } 149 | 150 | _, err = settings.InitSwitchKeys(web.dataFolder) 151 | if err != nil { 152 | web.sugarLogger.Errorf("Failed to initialize switch keys: %s", err) 153 | } 154 | 155 | web.localDbManager = localDbManager 156 | defer localDbManager.Close() 157 | 158 | if web.state.switchDB != nil { 159 | if _, err := web.buildLocalDB(web.localDbManager, false); err != nil { 160 | web.sugarLogger.Error(err) 161 | } 162 | } 163 | 164 | // Run http server 165 | web.HandleResources() 166 | web.HandleImages() 167 | web.HandleIndex() 168 | web.HandleMissing() 169 | web.HandleUpdates() 170 | web.HandleDLC() 171 | web.HandleIssues() 172 | web.HandleSettings() 173 | web.HandleSynchronize() 174 | web.HandleOrganize() 175 | web.HandleApi() 176 | 177 | web.router.Handle("/", http.RedirectHandler("/index.html", http.StatusMovedPermanently)) 178 | 179 | http.Handle("/", web.router) 180 | 181 | web.sugarLogger.Info("[SLM started]") 182 | 183 | if err := http.ListenAndServe(fmt.Sprint(":", web.appSettings.Port), nil); err != nil { 184 | web.sugarLogger.Error(fmt.Errorf("running http server failed: %w", err)) 185 | log.Fatal(err) 186 | } 187 | } 188 | 189 | func (web *Web) UpdateProgress(curr int, total int, message string) { 190 | web.sugarLogger.Debugf("%v (%v/%v)", message, curr, total) 191 | } 192 | 193 | func (web *Web) updateDB() { 194 | if web.state.switchDB 
== nil { 195 | switchDb, err := web.buildSwitchDb() 196 | if err != nil { 197 | web.sugarLogger.Error(err) 198 | return 199 | } 200 | web.state.switchDB = switchDb 201 | } 202 | } 203 | 204 | func (web *Web) buildSwitchDb() (*db.SwitchTitlesDB, error) { 205 | settingsObj := settings.ReadSettings(web.dataFolder) 206 | 207 | web.UpdateProgress(1, 4, "Downloading titles.json") 208 | filename := filepath.Join(web.dataFolder, settings.TITLE_JSON_FILENAME) 209 | titleFile, titlesEtag, err := db.LoadAndUpdateFile(settings.TITLES_JSON_URL, filename, settingsObj.TitlesEtag) 210 | 211 | if err != nil { 212 | return nil, errors.New("failed to download switch titles [reason:" + err.Error() + "]") 213 | } 214 | 215 | settingsObj.TitlesEtag = titlesEtag 216 | 217 | web.UpdateProgress(2, 4, "Downloading versions.json") 218 | filename = filepath.Join(web.dataFolder, settings.VERSIONS_JSON_FILENAME) 219 | versionsFile, versionsEtag, err := db.LoadAndUpdateFile(settings.VERSIONS_JSON_URL, filename, settingsObj.VersionsEtag) 220 | 221 | if err != nil { 222 | return nil, errors.New("failed to download switch updates [reason:" + err.Error() + "]") 223 | } 224 | 225 | settingsObj.VersionsEtag = versionsEtag 226 | 227 | settings.SaveSettings(settingsObj, web.dataFolder) 228 | 229 | web.UpdateProgress(3, 4, "Processing switch titles and updates ...") 230 | switchTitleDB, err := db.CreateSwitchTitleDB(titleFile, versionsFile) 231 | 232 | web.UpdateProgress(4, 4, "Finishing up...") 233 | 234 | return switchTitleDB, err 235 | } 236 | 237 | func (web *Web) buildLocalDB(localDbManager *db.LocalSwitchDBManager, ignoreCache bool) (*db.LocalSwitchFilesDB, error) { 238 | settingsObj := settings.ReadSettings(web.dataFolder) 239 | 240 | folderToScan := settingsObj.Folder 241 | 242 | scanFolders := settingsObj.ScanFolders 243 | scanFolders = append(scanFolders, folderToScan) 244 | 245 | localDB, err := localDbManager.CreateLocalSwitchFilesDB(web.state.switchDB, web.dataFolder, scanFolders, web, true, 
ignoreCache) 246 | web.state.localDB = localDB 247 | 248 | return localDB, err 249 | } 250 | --------------------------------------------------------------------------------