├── .gitattributes ├── .github └── workflows │ └── build.yml ├── .gitignore ├── .jvmopts ├── .scalafmt.conf ├── CHANGES.md ├── LICENSE ├── README.md ├── build.sbt ├── project ├── build.properties └── plugins.sbt ├── release.sh └── src ├── main ├── scala-2.11 │ └── better │ │ └── files │ │ └── CloseableIteratorCompat.scala ├── scala-2.12 │ └── better │ │ └── files │ │ └── CloseableIteratorCompat.scala ├── scala-2.13 │ └── better │ │ └── files │ │ └── CloseableIteratorCompat.scala ├── scala-2 │ └── better │ │ └── files │ │ └── ResourceCompat.scala ├── scala-3 │ └── better │ │ └── files │ │ ├── CloseableIteratorCompat.scala │ │ └── ResourceCompat.scala └── scala │ └── better │ └── files │ ├── CloseableIterator.scala │ ├── Dispose.scala │ ├── Dsl.scala │ ├── File.scala │ ├── FileMonitor.scala │ ├── Implicits.scala │ ├── ReaderInputStream.scala │ ├── Resource.scala │ ├── Scanner.scala │ ├── TeeOutputStream.scala │ ├── UnicodeCharset.scala │ ├── WriterOutputStream.scala │ └── package.scala ├── site-preprocess └── index.html ├── site └── tech_talk_preview.png └── test ├── java └── better │ └── files │ └── benchmarks │ └── ArrayBufferScanner.java ├── resources └── better │ └── files │ ├── issue-624.zip │ ├── issues-262.zip │ ├── issues-326.txt │ ├── test-file.txt │ └── test_pkg │ └── another-test-file.txt ├── scala-2 └── better │ └── files │ └── ShapelessScannerSpec.scala └── scala └── better └── files ├── CloseableIteratorSpec.scala ├── CommonSpec.scala ├── DisposeSpec.scala ├── FileMonitorSpec.scala ├── FileSpec.scala ├── GlobSpec.scala ├── ResourceSpec.scala ├── ScannerSpec.scala ├── akka ├── FileWatcher.scala ├── FileWatcherSpec.scala └── README.md ├── benchmarks ├── Benchmark.scala ├── EncodingBenchmark.scala ├── ScannerBenchmark.scala └── Scanners.scala └── test_pkg └── ResourceSpecHelper.scala /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | tags: ["*"] 7 | pull_request: 8 | branches: [master] 9 | 10 | permissions: 11 | contents: write 12 | 13 | jobs: 14 | test: 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | os: [ubuntu-latest, macos-latest, windows-latest] 19 | java: [8, 11, 17] # Include _only_ LTS Java versions in matrix: https://www.oracle.com/java/technologies/java-se-support-roadmap.html 20 | 21 | runs-on: ${{matrix.os}} 22 | continue-on-error: ${{matrix.os == 'windows-latest'}} 23 | 24 | steps: 25 | - uses: actions/checkout@v3 26 | 27 | - name: Set up JDK ${{matrix.java}} 28 | uses: olafurpg/setup-scala@v13 29 | with: 30 | java-version: ${{matrix.java}} 31 | 32 | - name: Cache scala dependencies 33 | uses: coursier/cache-action@v6 34 | 35 | - name: Unit test 36 | run: sbt +test 37 | 38 | # Adapted from https://github.com/sbt/sbt-ci-release/blob/main/.github/workflows/release.yml 39 | publish: 40 | runs-on: ubuntu-latest 41 | 42 | needs: test 43 | if: github.ref == 'refs/heads/master' 44 | 45 | steps: 46 | - uses: actions/checkout@v3 47 | with: 48 | fetch-depth: 0 49 | 50 | - uses: olafurpg/setup-scala@v13 51 | 52 | - name: Release 53 | run: sbt ci-release 54 | env: 55 | PGP_PASSPHRASE: ${{secrets.PGP_PASSPHRASE}} 56 | PGP_SECRET: ${{secrets.PGP_SECRET}} 57 | SONATYPE_PASSWORD: ${{secrets.SONATYPE_PASSWORD}} 58 | SONATYPE_USERNAME: 
${{secrets.SONATYPE_USERNAME}} 59 | 60 | - name: Generate scaladoc 61 | run: sbt +makeSite 62 | 63 | - name: Generate coverage reports 64 | run: sbt coverage test coverageReport coverageAggregate 65 | 66 | - name: Upload coverage reports 67 | uses: codecov/codecov-action@v3 68 | 69 | - name: Publish Github Pages 70 | uses: JamesIves/github-pages-deploy-action@v4 71 | with: 72 | folder: target/site 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Scala 2 | *.class 3 | *.log 4 | 5 | # SBT 6 | dist/* 7 | target/ 8 | lib_managed/ 9 | src_managed/ 10 | project/boot/ 11 | project/plugins/project/ 12 | .history 13 | .cache 14 | .lib/ 15 | .bsp/ 16 | out/ 17 | 18 | # Metals 19 | .bloop/ 20 | .metals/ 21 | 22 | # Intellij 23 | .idea/ 24 | *.iws 25 | .idea_modules/ 26 | *.iml 27 | *.ipr 28 | 29 | # Linux 30 | *~ 31 | .fuse_hidden* 32 | .directory 33 | .Trash-* 34 | .nfs* 35 | 36 | # OSX 37 | *.DS_Store 38 | .AppleDouble 39 | .LSOverride 40 | ._* 41 | .DocumentRevisions-V100 42 | .fseventsd 43 | .Spotlight-V100 44 | .TemporaryItems 45 | .Trashes 46 | .VolumeIcon.icns 47 | .com.apple.timemachine.donotpresent 48 | .AppleDB 49 | .AppleDesktop 50 | Network Trash Folder 51 | Temporary Items 52 | .apdisk 53 | Icon 54 | 55 | # Windows 56 | Thumbs.db 57 | ehthumbs.db 58 | ehthumbs_vista.db 59 | Desktop.ini 60 | $RECYCLE.BIN/ 61 | *.cab 62 | *.msi 63 | *.msm 64 | *.msp 65 | *.lnk 66 | -------------------------------------------------------------------------------- /.jvmopts: -------------------------------------------------------------------------------- 1 | -Dfile.encoding=UTF8 2 | -Xmx1G 3 | -XX:ReservedCodeCacheSize=256M 4 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | maxColumn = 140 2 | align.preset = more 3 | version = 3.7.1 4 | project.layout = StandardConvention 5 | runner.dialect = scala213source3 6 | 7 | rewrite.rules = [Imports] 8 | rewrite.imports.sort = scalastyle 9 | rewrite.imports.groups = [ 10 | ["java\\..*"], 11 | ["scala\\..*"], 12 | ["better\\.files\\..*"], 13 | ["org\\.scalatest\\..*"], 14 | ] 15 | 16 | # Added to minimize changes 17 | docstrings.style = keep 18 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | better-files follows the following `MAJOR.MINOR.PATCH` release conventions: 2 | - **Changes in `PATCH` version**: 3 | - Minor functionality changes (usually bug fixes) 4 | - No breaking public API changes 5 | - New APIs might be added 6 | - **Change in `MINOR` version**: 7 | - In addition to `PATCH` changes 8 | - Minor API shape changes e.g. renaming, deprecations 9 | - Trivial to modify code to address compilation issues 10 | - **Change in `MAJOR` version**: 11 | - In addition to `MINOR` changes 12 | - Significant structural and API changes 13 | 14 | ----------- 15 | 16 | ## v4.0.0 [WIP] 17 | * (_Planned_) [Issue #589](https://github.com/pathikrit/better-files/issues/589): Methods that do I/O has parens `()` 18 | - **Migration Guide**: Some APIs need extra parens e.g. `file.size` is now `file.size()` and `dir.list` is now `dir.list()` 19 | * [PR #584](https://github.com/pathikrit/better-files/pull/584): Remove implicit options from all APIs. 
20 | - **Migration Guide**: Instead of implicit params like `implicit charset: Charset`, it is now explicitly passed in: 21 | ```scala 22 | // Before 23 | def appendText(text: String)(implicit charset: Charset = DefaultCharset) 24 | 25 | // Now 26 | def appendText(text: String, charset: Charset = DefaultCharset) 27 | ``` 28 | * [Issue #295](https://github.com/pathikrit/better-files/pull/587): `CloseableIterators` fixes resource leaks on partial iterations ([#270](https://github.com/pathikrit/better-files/issues/270), [#403](https://github.com/pathikrit/better-files/issues/403), [#553](https://github.com/pathikrit/better-files/issues/553)) 29 | - **Migration Guide** 30 | ```scala 31 | val lines: Iterator[String] = file.lines() // This will auto close the underlying stream on iterator exhaustion 32 | 33 | lines.find(_ == "hello world") //This will auto close the stream if nothing is found OR if the item is found 34 | lines.take(10).size //This will close the stream even if stream has >10 lines 35 | 36 | // BEFORE: The above _partial_ operations would leave the stream open 37 | // NOW: The streams would be closed 38 | 39 | // If you still want the previous auto closing behaviour: 40 | lines.nonClosing().take(10).size // This would leave stream open if it has >10 lines 41 | ``` 42 | * (_Planned_) [Issue #88](https://github.com/pathikrit/better-files/issues/88): Path ASTs for relative vs. absolute path 43 | * [Issue #593](https://github.com/pathikrit/better-files/pull/598): Remove compilation warnings for Scala 2.13 and Scala 3 44 | * [Issue #624](https://github.com/pathikrit/better-files/pull/633): Fix zip-slip vulnerability 45 | * (_Planned_) [Issue #590](https://github.com/pathikrit/better-files/issues/590): `file.walk()` can handle errors 46 | * (_Planned_) [Issue #591](https://github.com/pathikrit/better-files/issues/591): New APIs 47 | * (_Planned_) [Issue #3](https://github.com/pathikrit/better-files/issues/3): Walk File Tree APIs 48 | * (_Planned_) [Issue #221](https://github.com/pathikrit/better-files/issues/221): `linkTo` bug fix 49 | * (_Planned_) [Issue #129](https://github.com/pathikrit/better-files/issues/129): JSR-203 and JimFS compatibility 50 | * (_Planned_) [Issue #88](https://github.com/pathikrit/better-files/issues/88): Strongly typed relative and absolute path APIs 51 | * (_Planned_) [Issue #204](https://github.com/pathikrit/better-files/issues/204): Universal converter APIs 52 | 53 | ## [v3.9.2](https://github.com/pathikrit/better-files/releases/tag/v3.9.2) 54 | * [PR #573](https://github.com/pathikrit/better-files/pull/573): Scala 3 Support! 
55 | * [PR #426](https://github.com/pathikrit/better-files/pull/426): Add `UnicodeCharset.isValid()` 56 | * [PR #428](https://github.com/pathikrit/better-files/pull/428): Add `File.resourcePathAsString` 57 | * [PR #436](https://github.com/pathikrit/better-files/pull/436): Exclude destination zip file from final output 58 | * [PR #429](https://github.com/pathikrit/better-files/pull/429): Add `URI` and `URL` helpers 59 | * [Issue #478](https://github.com/pathikrit/better-files/issues/478): Handle broken symlinks in `size()` 60 | * [Issue #412](https://github.com/pathikrit/better-files/issues/412): Better error message when URI is not a valid file 61 | 62 | ## [v3.9.1](https://github.com/pathikrit/better-files/releases/tag/v3.9.1) 63 | * [Issue #417](https://github.com/pathikrit/better-files/issues/417): Fix `NoSuchMethodError` when reading `contentAsString` on JDK8 64 | 65 | ## [v3.9.0](https://github.com/pathikrit/better-files/releases/tag/v3.9.0) 66 | * [Issue #326](https://github.com/pathikrit/better-files/issues/326): Do not mark end of input when charset is detected from BOM 67 | * [Issue #332](https://github.com/pathikrit/better-files/issues/332): Fix `setGroup` - it was using semantics of `setOwner` 68 | * [Checksum utils for streams](https://github.com/pathikrit/better-files/issues/330) 69 | * [Issue #316](https://github.com/pathikrit/better-files/issues/316): Rename `isWriteable` to `isWritable` 70 | * [Issue #380](https://github.com/pathikrit/better-files/issues/380): Zip API exception in JDK 11 71 | * [Issue #391](https://github.com/pathikrit/better-files/issues/391): Handle NPE in `FileMonitor` for large folders 72 | * [Issue #362](https://github.com/pathikrit/better-files/issues/362): Add API to skip missing files when calculating size on a directory 73 | * [Issue #320](https://github.com/pathikrit/better-files/issues/320): Change extension works when file is not present 74 | 75 | ## [v3.8.0](https://github.com/pathikrit/better-files/releases/tag/v3.8.0) 76 | * [PR #312](https://github.com/pathikrit/better-files/issues/312): Scala 2.13.0-RC1 release 77 | * [Issue #309](https://github.com/pathikrit/better-files/issues/312): Recursive `deleteOnExit` support 78 | * [Rename](https://github.com/pathikrit/better-files/commit/ae45c6b419a53a7095e3dadccda010eb4d624fc6) certain implicit utils 79 | 80 | ## [v3.7.1](https://github.com/pathikrit/better-files/releases/tag/v3.7.1) 81 | * [Issue #283](https://github.com/pathikrit/better-files/issues/283): Fix resource not closing bug on File#list 82 | * [Issue #279](https://github.com/pathikrit/better-files/issues/279): Better manage open file handles in recursive deletion of large directories 83 | * [Issue #285](https://github.com/pathikrit/better-files/issues/285): Add canonical file/path APIs 84 | * [PR #290](https://github.com/pathikrit/better-files/pull/290) Add maxDepth to File#glob and File#globRegex 85 | 86 | ## [v3.7.0](https://github.com/pathikrit/better-files/releases/tag/v3.7.0) 87 | * [Issue #248](https://github.com/pathikrit/better-files/issues/248): Release for Scala 2.13.0-M5 88 | * [Issue #270](https://github.com/pathikrit/better-files/issues/270): `FileTreeIterator` can be traversed multiple times safely 89 | * [Issue #262](https://github.com/pathikrit/better-files/issues/262): Handle backslashes in zip entry name 90 | * [Issue #278](https://github.com/pathikrit/better-files/issues/278): Dispose multiple resources 91 | * [Util](https://github.com/pathikrit/better-files/commit/07f0f69b7a544e74720ac60f0f5921d8a0becc8e) to fetch 
root Resource URL 92 | * [`using`](https://github.com/pathikrit/better-files/commit/2a7c438ef672d2b414027e96c7fcecc11a9b791b) util for disposable resources 93 | * [file.lineCount](https://github.com/pathikrit/better-files/commit/af315c9b1311c9baeab9b0a70a388e772b6a5eaf) util 94 | * [inputstream.byteArray](https://github.com/pathikrit/better-files/commit/1657d8b30c836059813637a5a0d412d7a924467f) util 95 | 96 | ## [v3.6.0](https://github.com/pathikrit/better-files/releases/tag/v3.6.0) 97 | * [Issue #123](https://github.com/pathikrit/better-files/issues/233): Rename ManagedResource to Dispose 98 | * [Issue #241](https://github.com/pathikrit/better-files/issues/241): Remove resource leak from directory empty check 99 | * [Issue #242](https://github.com/pathikrit/better-files/issues/242): Support for JDK 9 and JDK 10 100 | * [Remove Files alias](https://github.com/pathikrit/better-files/commit/bfccb5041239bc5413afade4218ec1fb90d3e3d5) 101 | * [List with filter API](https://github.com/pathikrit/better-files/commit/41e521b9a95a7f3ae5affb1a8eb798a0b2358445) 102 | * More [createIfNotExists() APIs](https://github.com/pathikrit/better-files/commit/9c83d8b6c6eeb361eed5ffcf3e0810b207af7939) 103 | * [Issue #247](https://github.com/pathikrit/better-files/issues/247): Strict equality for contains/isParentOf/isChildOf 104 | * [Issue #249](https://github.com/pathikrit/better-files/issues/249): Make File serializable 105 | * More [ZIP I/O helpers](https://github.com/pathikrit/better-files/commit/59c17c60eb22daad4a8690c052169c379fe3d5e3) 106 | * More [String to I/O helpers](https://github.com/pathikrit/better-files/commit/5afb5f1ac58b248582e5cffcd8f32ebb2d91cd83) 107 | 108 | ## [v3.5.0](https://github.com/pathikrit/better-files/releases/tag/v3.5.0) 109 | * [PR #230](https://github.com/pathikrit/better-files/pull/230): New Resource APIs with [module safety](https://github.com/pathikrit/better-files/pull/227) 110 | * [Issue #224](https://github.com/pathikrit/better-files/issues/224): FileMonitor should not block threads 111 | 112 | ## [v3.4.0](https://github.com/pathikrit/better-files/releases/tag/v3.4.0) 113 | * [PR #202](https://github.com/pathikrit/better-files/pull/202): for-comprehension friendly ARM 114 | * [PR #203](https://github.com/pathikrit/better-files/pull/203): Type-class for Scanner construction 115 | * Remove [redundant `count` param](https://github.com/pathikrit/better-files/commit/8cc66d0e8ac6517597eeb1db1814903f2256b805) from `File.Monitor#onUnknownEvent` 116 | 117 | ## [v3.3.1](https://github.com/pathikrit/better-files/releases/tag/v3.3.1) 118 | * [Issue #146](https://github.com/pathikrit/better-files/issues/146): Release for Scala 2.11 119 | 120 | ## [v3.3.0](https://github.com/pathikrit/better-files/releases/tag/v3.3.0) 121 | * [Issue #193](https://github.com/pathikrit/better-files/issues/193): Handle fast changing directory watching on Windows 122 | * [Issue #195](https://github.com/pathikrit/better-files/issues/195): Do not swallow `FileAlreadyExistsException` when creating directory or file 123 | * [Add](https://github.com/pathikrit/better-files/commit/00f27867ebd0cddec1ace7835dcc2375869fb3ae) method to check verified file existence (or non-existence) 124 | * [Issue #198](https://github.com/pathikrit/better-files/issues/198): `InputStreamOps#asString` doesn't close the stream on exception 125 | * [PR #199](https://github.com/pathikrit/better-files/pull/199): Utils for Object I/O 126 | * [PR #200](https://github.com/pathikrit/better-files/pull/200): GZIP APIs 127 | 128 | ## 
[v3.2.0](https://github.com/pathikrit/better-files/releases/tag/v3.2.0) 129 | * [Rename](https://github.com/pathikrit/better-files/commit/ec34a6f843fec941b51bdddafc2e07e5bc0e1cbb) PosixFilePermissions.OTHERS* APIs 130 | * [Issue #186](https://github.com/pathikrit/better-files/issues/186): Splitter based Scanner 131 | * [Issue #173](https://github.com/pathikrit/better-files/issues/173): Better ARM handling of fatal errors 132 | * [Issue #182](https://github.com/pathikrit/better-files/issues/182): Move and Copy *into* directory utils 133 | * [Issue #189](https://github.com/pathikrit/better-files/issues/189): Util to read String from an InputStream 134 | * [Issue #187](https://github.com/pathikrit/better-files/issues/187): Readers for `java.time.*` and `java.sql.*` 135 | * [Restore File.usingTemp](https://github.com/pathikrit/better-files/commit/35184a642245db3d1e41fc02c7bfbec0b19a43bb) first introduced in [7c60ca](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca) 136 | * [Fix](https://github.com/pathikrit/better-files/pull/184) bug in appendBytes 137 | 138 | ## [v3.1.0](https://github.com/pathikrit/better-files/releases/tag/v3.1.0) 139 | * [Issue #140](https://github.com/pathikrit/better-files/issues/140): Batch up events for file monitoring 140 | * [Issue #136](https://github.com/pathikrit/better-files/issues/136): Use execution contexts for file monitoring 141 | * [Issue #152](https://github.com/pathikrit/better-files/issues/152): Streamed unzipping 142 | * [Issue #150](https://github.com/pathikrit/better-files/issues/150): `ManagedResource[File]` for temp files 143 | * [Issue #126](https://github.com/pathikrit/better-files/pull/159): New Typeclassed approach to ARM 144 | * [Issue #160](https://github.com/pathikrit/better-files/issues/160): Ability to convert Reader/Writer to Input/Output streams 145 | * [Issue #77](https://github.com/pathikrit/better-files/issues/77): Better UNIX-y behaviour for `cp` and `mv` DSL utils 146 | * [Issue #169](https://github.com/pathikrit/better-files/issues/169): Support for symbols in file DSL 147 | * [Issue #171](https://github.com/pathikrit/better-files/issues/171): Handle `createDirectories()` on symlinks to existing directories 148 | 149 | ## [v3.0.0](https://github.com/pathikrit/better-files/releases/tag/v3.0.0) 150 | * [Issue #9](https://github.com/pathikrit/better-files/issues/9): File resource utils 151 | * [Issue #114](https://github.com/pathikrit/better-files/issues/114): Glob with automatic path 152 | * [Issue #107](https://github.com/pathikrit/better-files/issues/107): Handle Byte-order markers 153 | * [PR #113](https://github.com/pathikrit/better-files/pull/113): File anchor util 154 | * [Issue #105](https://github.com/pathikrit/better-files/issues/105): Remove dependency on scala.io 155 | * [File.usingTemp](https://github.com/pathikrit/better-files/commit/d3522e8da63b55c7d3fa14cc9b0b76acd57c60ca) 156 | * [Optional symbolic operations](https://github.com/pathikrit/better-files/issues/102) 157 | * [PR #100](https://github.com/pathikrit/better-files/pull/100): Fix issue in unzip of parents 158 | * [PR #101](https://github.com/pathikrit/better-files/pull/101): Removed File.Type 159 | * [Issue #96](https://github.com/pathikrit/better-files/issues/96): Teeing outputstreams 160 | * [File.testPermission](https://github.com/pathikrit/better-files/commit/7b175c582643790e4d2fd21552e47cc9c615dfbb) 161 | * [File.nonEmpty](https://github.com/pathikrit/better-files/commit/18c9cd51b7b2e503ff4944050ac5119470869e6e) 162 
| * [Update metadata API](https://github.com/pathikrit/better-files/commit/c3d65951d80f09b813e158a9e3a1785c622353b3) 163 | * [Issue #80](https://github.com/pathikrit/better-files/issues/80): Unzip filters 164 | * [PR #107](https://github.com/pathikrit/better-files/pull/127): Java serialization utils 165 | 166 | ## [v2.17.1](https://github.com/pathikrit/better-files/releases/tag/v2.17.1) 167 | * [PR #99](https://github.com/pathikrit/better-files/pull/99): Release for Scala 2.12 168 | 169 | ## [v2.17.0](https://github.com/pathikrit/better-files/releases/tag/v2.17.0) 170 | * [PR #78](https://github.com/pathikrit/better-files/pull/78): Change `write(Array[Byte])` to `writeByteArray()`. Same for `append` 171 | * [Issue #76](https://github.com/pathikrit/better-files/issues/76): Move `better.files.Read` typeclass to `better.files.Scanner.Read` 172 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015-2023 Pathikrit Bhowmick 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | val username = "pathikrit" 2 | val repo = "better-files" 3 | 4 | inThisBuild( 5 | List( 6 | organization.withRank(KeyRanks.Invisible) := repo.replace("-", "."), 7 | homepage := Some(url(s"https://github.com/$username/$repo")), 8 | licenses := List("MIT" -> url(s"https://github.com/$username/$repo/blob/master/LICENSE")), 9 | developers := List( 10 | Developer( 11 | id = username, 12 | name = "Pathikrit Bhowmick", 13 | email = "pathikritbhowmick@msn.com", 14 | url = new URL(s"http://github.com/$username") 15 | ) 16 | ), 17 | Global / onChangedBuildSource := ReloadOnSourceChanges 18 | ) 19 | ) 20 | 21 | lazy val main = (project in file(".")) 22 | .settings( 23 | name := repo, 24 | description := "Simple, safe and intuitive I/O in Scala", 25 | organization := s"com.github.$username", 26 | crossScalaVersions := Seq("2.11.12", "2.12.17", "2.13.10", "3.2.2"), 27 | crossVersion := CrossVersion.binary, 28 | scalacOptions := scalacOptions.value diff rmCompilerFlags(scalaVersion.value), 29 | Compile / compile := (Compile / compile).dependsOn(Compile / scalafmt, Test / scalafmt, Compile / scalafmtSbt).value, 30 | Test / test := (Test / test).dependsOn(Compile / scalafmtCheck, Test / scalafmtCheck, Compile / scalafmtSbtCheck).value, 31 | Test / testOptions += Tests.Argument("-oDF"), // show full stack trace on test failures 32 | libraryDependencies ++= dependencies(scalaVersion.value) 33 | ) 34 | // makeSite settings 35 | .enablePlugins(SiteScaladocPlugin, PreprocessPlugin) 36 | .settings( 37 | SiteScaladoc / siteSubdirName := "api/default", 38 | Preprocess / preprocessVars := Map( 39 | "scalaVersions" -> crossScalaVersions.value.map(CrossVersion.binaryScalaVersion).map(v => s"'$v'").mkString(", ") 40 | ), 41 | makeSite := { 42 | val dest = makeSite.value 43 | // See https://github.com/sbt/sbt/discussions/7151: Hack to make makeSite play well with crossScalaVersion 44 | IO.copyDirectory(source = (Compile / doc).value, target = dest / "api" / CrossVersion.binaryScalaVersion(scalaVersion.value)) 45 | dest 46 | } 47 | ) 48 | 49 | /** We use https://github.com/DavidGregory084/sbt-tpolecat but this gives us a way to remove some unruly flags */ 50 | def rmCompilerFlags(scalaVersion: String): Seq[String] = 51 | CrossVersion.binaryScalaVersion(scalaVersion) match { 52 | case "2.11" | "2.12" => Seq("-Ywarn-value-discard") 53 | case "2.13" => Seq("-Wvalue-discard", "-Wnonunit-statement", "-Wunused:imports") 54 | case _ => Nil 55 | } 56 | 57 | /** My dependencies - note this is a zero dependency library, so following are only for Tests or cross build support */ 58 | def dependencies(scalaVersion: String): Seq[ModuleID] = 59 | Seq( 60 | // TODO: Get rid of scala-collection-compat when we drop support for Scala 2.12 and -Wunused:imports since it triggers https://github.com/scala/scala-collection-compat/issues/240 61 | "*" -> ("org.scala-lang.modules" %% "scala-collection-compat" % "2.9.0"), 62 | "2" -> ("org.scala-lang" % "scala-reflect" % scalaVersion % Provided), 63 | "2" -> ("com.chuusai" %% "shapeless" % "2.3.4" % Test), // For shapeless based Reader/Scanner in tests 64 | "*" -> ("org.scalatest" %% "scalatest" % "3.2.15" % Test), 65 | "*" -> ("commons-io" % "commons-io" % "2.11.0" % Test), 66 | "*" -> ("fastjavaio" % "fastjavaio" % "1.0" from "https://github.com/williamfiset/FastJavaIO/releases/download/v1.0/fastjavaio.jar") % 
Test, // Benchmarks 67 | "*" -> ("com.typesafe.akka" %% "akka-actor" % (if (scalaVersion.startsWith("2.11")) "2.5.32" else "2.7.0") % Test) 68 | ).collect({ case (v, lib) if v == "*" || scalaVersion.startsWith(v) => lib }) 69 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.8.2 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") 2 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") 3 | addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.4.2") 4 | addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.4.2") 5 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") 6 | addSbtPlugin("com.github.sbt" % "sbt-site-paradox" % "1.5.0-RC2") 7 | -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euxo pipefail 4 | 5 | # See https://github.com/olafurpg/sbt-ci-release#git 6 | # Usage ./release.sh v3.9.1 7 | # Note this force pushes using -f below so that we can retry deploys 8 | VERSION=$1 9 | git tag -f -a ${VERSION} -m "${VERSION}" 10 | git push origin ${VERSION} -f 11 | -------------------------------------------------------------------------------- /src/main/scala-2.11/better/files/CloseableIteratorCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.GenTraversableOnce 4 | 5 | trait CloseableIteratorCompat[+A] extends CloseableIterator[A] { self => 6 | override def indexWhere(p: A => Boolean) = evalAndClose(super.indexWhere(p)) 7 | override def indexOf[B >: A](elem: B) = evalAndClose(super.indexOf(elem)) 8 | override def ++[B >: A](that: => GenTraversableOnce[B]) = closeInTheEnd(super.++(that)) 9 | 10 | override def drop(n: Int) = slice(n, Int.MaxValue) 11 | 12 | override def slice(from: Int, until: Int) = closeInTheEnd(new Iterator[A] { 13 | var i = 0 14 | 15 | override def hasNext = { 16 | while (i < from && self.hasNext) i += 1 17 | i < until && self.hasNext 18 | } 19 | 20 | override def next() = { 21 | i += 1 22 | self.next() 23 | } 24 | }) 25 | 26 | override def zip[B](that: Iterator[B]) = 27 | that match { 28 | case other: CloseableIterator[_] => CloseableIterator(super.zip(that), () => other.evalAndClose(this.closeOnce())) 29 | case _ => closeInTheEnd(super.zip(that)) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/scala-2.12/better/files/CloseableIteratorCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.GenTraversableOnce 4 | 5 | private[files] trait CloseableIteratorCompat[+A] extends CloseableIterator[A] { 6 | override protected def sliceIterator(from: Int, until: Int) = closeInTheEnd(super.sliceIterator(from, until)) 7 | override def indexWhere(p: A => Boolean, from: Int) = evalAndClose(super.indexWhere(p, from)) 8 | override def indexOf[B >: A](elem: B, from: Int) = evalAndClose(super.indexOf(elem, from)) 9 | override def ++[B >: A](that: => GenTraversableOnce[B]) = closeInTheEnd(super.++(that)) 
10 | 11 | override def drop(n: Int) = slice(n, Int.MaxValue) 12 | 13 | override def zip[B](that: Iterator[B]) = 14 | that match { 15 | case other: CloseableIterator[_] => CloseableIterator(super.zip(that), () => other.evalAndClose(this.closeOnce())) 16 | case _ => closeInTheEnd(super.zip(that)) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/main/scala-2.13/better/files/CloseableIteratorCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.IterableOnce 4 | 5 | private[files] trait CloseableIteratorCompat[+A] extends CloseableIterator[A] { 6 | override protected def sliceIterator(from: Int, until: Int) = closeInTheEnd(super.sliceIterator(from, until)) 7 | override def indexWhere(p: A => Boolean, from: Int) = evalAndClose(super.indexWhere(p, from)) 8 | override def indexOf[B >: A](elem: B, from: Int) = evalAndClose(super.indexOf(elem, from)) 9 | override def concat[B >: A](xs: => IterableOnce[B]) = closeInTheEnd(super.concat(xs)) 10 | 11 | override def zip[B](that: IterableOnce[B]) = 12 | that match { 13 | case other: CloseableIterator[_] => CloseableIterator(super.zip(that), () => other.evalAndClose(this.closeOnce())) 14 | case _ => closeInTheEnd(super.zip(that)) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/main/scala-2/better/files/ResourceCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.reflect.macros.{blackbox, ReificationException} 4 | 5 | private[files] trait ResourceCompat { 6 | 7 | /** Look up class resource files. 8 | * 9 | * This Resource looks up resources relative to the JVM class file for `T`, 10 | * using [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. 11 | * For example, if `com.example.ExampleClass` is given for `T`, then resource files will be searched for in the `com/example` folder containing `ExampleClass.class`. 12 | * 13 | * If you want to look up resource files relative to the call site instead (that is, you want a class to look up one of its own resources), use the `my` method instead. 14 | * 15 | * @example {{{ Resource.at[YourClass].url("config.properties") }}} 16 | * @tparam T The class, trait, or object to look up from. Objects must be written with a `.type` suffix, such as `Resource.at[SomeObject.type]`. 17 | * @return A Resource for `T`. 18 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 19 | */ 20 | def at[T]: Resource = macro Macros.atStaticImpl[T] 21 | 22 | /** Look up class resource files. 23 | * 24 | * This Resource looks up resources from the given Class, 25 | * using [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. 26 | * For example, if `classOf[com.example.ExampleClass]` is given for `clazz`, then resource files will be searched for 27 | * in the `com/example` folder containing `ExampleClass.class`. 28 | * 29 | * If you want to look up resource files relative to the call site instead (that is, you want your class to look up one of its own resources), 30 | * use the `my` method instead. 
31 | * 32 | * @example {{{ Resource.at(Class.forName("your.AppClass")).url("config.properties") }}} 33 | * 34 | * In this example, a file named `config.properties` is expected to appear alongside the file `AppClass.class` in the package `your`. 35 | * @param clazz The class to look up from. 36 | * @return A Resource for `clazz`. 37 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 38 | */ 39 | def at(clazz: Class[_]): Resource = macro Macros.atDynamicImpl 40 | 41 | /** Look up own resource files. 42 | * 43 | * This Resource looks up resources from the [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html Class]] surrounding the call, 44 | * using [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. 45 | * For example, if `my` is called from `com.example.ExampleClass`, 46 | * then resource files will be searched for in the `com/example` folder containing `ExampleClass.class`. 47 | * 48 | * @example {{{ Resource.my.url("config.properties") }}} 49 | * @return A Resource for the call site. 50 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 51 | */ 52 | def my: Resource = macro Macros.myImpl 53 | } 54 | 55 | /** Implementations of the `Resource.at` macros. This is needed because `Class#getResource` is caller-sensitive; 56 | * calls to it must appear in user code, ''not'' in better-files. 57 | */ 58 | private[files] final class Macros(val c: blackbox.Context) { 59 | 60 | import c.Expr 61 | import c.universe._ 62 | 63 | def atStaticImpl[T](implicit T: WeakTypeTag[T]): Expr[Resource] = { 64 | val rtc = Expr[Class[_]] { 65 | try { 66 | c.reifyRuntimeClass(T.tpe, concrete = true) 67 | } catch { 68 | case _: ReificationException => c.abort(c.enclosingPosition, s"${T.tpe} is not a concrete type") 69 | } 70 | } 71 | atDynamicImpl(rtc) 72 | } 73 | 74 | def atDynamicImpl(clazz: Expr[Class[_]]): Expr[Resource] = 75 | reify { 76 | new Resource { 77 | override def url(name: String) = Option(clazz.splice.getResource(name)) 78 | } 79 | } 80 | 81 | def myImpl: Expr[Resource] = { 82 | val rtc = c.reifyEnclosingRuntimeClass 83 | if (rtc.isEmpty) { 84 | // The documentation for reifyEnclosingRuntimeClass claims that this is somehow possible!? 
85 | c.abort(c.enclosingPosition, "this location doesn't correspond to a Java class file") 86 | } 87 | atDynamicImpl(Expr[Class[_]](rtc)) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/main/scala-3/better/files/CloseableIteratorCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.IterableOnce 4 | 5 | private[files] trait CloseableIteratorCompat[+A] extends CloseableIterator[A] { 6 | override protected def sliceIterator(from: Int, until: Int) = closeInTheEnd(super.sliceIterator(from, until)) 7 | override def indexWhere(p: A => Boolean, from: Int) = evalAndClose(super.indexWhere(p, from)) 8 | override def indexOf[B >: A](elem: B, from: Int) = evalAndClose(super.indexOf(elem, from)) 9 | override def concat[B >: A](xs: => IterableOnce[B]) = closeInTheEnd(super.concat(xs)) 10 | 11 | override def zip[B](that: IterableOnce[B]) = 12 | that match { 13 | case other: CloseableIterator[_] => CloseableIterator(super.zip(that), () => other.evalAndClose(this.closeOnce())) 14 | case _ => closeInTheEnd(super.zip(that)) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/main/scala-3/better/files/ResourceCompat.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.quoted.* 4 | 5 | private[files] trait ResourceCompat { 6 | 7 | /** Look up class resource files. 8 | * 9 | * This Resource looks up resources relative to the JVM class file for `T`, using 10 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. For example, if 11 | * `com.example.ExampleClass` is given for `T`, then resource files will be searched for in the `com/example` folder containing 12 | * `ExampleClass.class`. 13 | * 14 | * If you want to look up resource files relative to the call site instead (that is, you want a class to look up one of its own 15 | * resources), use the `my` method instead. 16 | * 17 | * @example 18 | * {{{Resource.at[YourClass].url("config.properties")}}} 19 | * @tparam T 20 | * The class, trait, or object to look up from. Objects must be written with a `.type` suffix, such as `Resource.at[SomeObject.type]`. 21 | * @return 22 | * A Resource for `T`. 23 | * @see 24 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 25 | */ 26 | inline def at[T]: Resource = ${ Macros.atStaticImpl[T] } 27 | 28 | /** Look up class resource files. 29 | * 30 | * This Resource looks up resources from the given Class, using 31 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. For example, if 32 | * `classOf[com.example.ExampleClass]` is given for `clazz`, then resource files will be searched for in the `com/example` folder 33 | * containing `ExampleClass.class`. 34 | * 35 | * If you want to look up resource files relative to the call site instead (that is, you want your class to look up one of its own 36 | * resources), use the `my` method instead. 37 | * 38 | * @example 39 | * {{{Resource.at(Class.forName("your.AppClass")).url("config.properties")}}} 40 | * 41 | * In this example, a file named `config.properties` is expected to appear alongside the file `AppClass.class` in the package `your`. 42 | * @param clazz 43 | * The class to look up from. 
44 | * @return 45 | * A Resource for `clazz`. 46 | * @see 47 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 48 | */ 49 | def at(clazz: Class[_]): Resource = new Resource { 50 | override def url(name: String) = Option(clazz.getResource(name)) 51 | } 52 | 53 | /** Look up own resource files. 54 | * 55 | * This Resource looks up resources from the [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html Class]] surrounding the 56 | * call, using [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]]. For 57 | * example, if `my` is called from `com.example.ExampleClass`, then resource files will be searched for in the `com/example` folder 58 | * containing `ExampleClass.class`. 59 | * 60 | * @example 61 | * {{{Resource.my.url("config.properties")}}} 62 | * @return 63 | * A Resource for the call site. 64 | * @see 65 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 66 | */ 67 | inline def my: Resource = ${ Macros.myImpl } 68 | } 69 | 70 | private[files] object Macros { 71 | 72 | def atStaticImpl[T: Type](using qc: Quotes): Expr[Resource] = { 73 | import qc.reflect.* 74 | val tpe = TypeRepr.of[T] 75 | val typeSymbolStr = tpe.typeSymbol.toString 76 | if (typeSymbolStr.startsWith("class ") || typeSymbolStr.startsWith("module class ")) { 77 | val baseClass = tpe.baseClasses.head 78 | return '{ 79 | new Resource { 80 | override def url(name: String) = Option( 81 | Class 82 | .forName(${ 83 | Expr(baseClass.fullName) 84 | }) 85 | .getResource(name) 86 | ) 87 | } 88 | } 89 | } else { 90 | report.errorAndAbort(s"${tpe.show} is not a concrete type") 91 | } 92 | } 93 | 94 | def myImpl(using qc: Quotes): Expr[Resource] = { 95 | import qc.reflect.* 96 | var callee = Symbol.spliceOwner 97 | while (callee != null && callee != Symbol.noSymbol) { 98 | callee = callee.owner 99 | if (callee.isClassDef) { 100 | return '{ 101 | new Resource { 102 | override def url(name: String) = Option( 103 | Class 104 | .forName(${ 105 | Expr(callee.fullName) 106 | }) 107 | .getResource(name) 108 | ) 109 | } 110 | } 111 | } 112 | } 113 | report.errorAndAbort("this location doesn't correspond to a Java class file") 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /src/main/scala/better/files/CloseableIterator.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | /** 4 | * An iterator with a close() function that gets called on iterator exhaustion OR any exceptions during iteration 5 | * Similar in functionality to Geny's self closing generators: https://github.com/com-lihaoyi/geny#self-closing-generators 6 | * Note: 7 | * 1) This assumes "exhaustion" on certain operations like find(), exists(), contains(), indexWhere(), forall() etc. 8 | * e.g. when find() finds an element we assume iterator exhaustion and thus we trigger close 9 | * 10 | * 2) For certain operations that return 2 Iterators e.g. 
span() and partition(), 11 | * to guarantee closing BOTH iterators must be consumed 12 | * 13 | * 3) Once close() has been invoked hasNext will always return false and next will throw an IllegalStateException 14 | */ 15 | trait CloseableIterator[+A] extends Iterator[A] with AutoCloseable { 16 | override def find(p: A => Boolean) = evalAndClose(super.find(p)) 17 | override def exists(p: A => Boolean) = evalAndClose(super.exists(p)) 18 | override def forall(p: A => Boolean) = evalAndClose(super.forall(p)) 19 | override def takeWhile(p: A => Boolean) = closeInTheEnd(super.takeWhile(p)) 20 | 21 | private[files] val closeOnce = Once(close) 22 | private[files] def isClosed() = closeOnce.isInvoked() 23 | 24 | /** Close at end of iteration */ 25 | private[files] def closeInTheEnd[T](t: Iterator[T]): Iterator[T] = 26 | CloseableIterator(t, closeOnce) 27 | 28 | /** Close this after evaluating f */ 29 | private[files] def evalAndClose[T](f: => T): T = 30 | tryWith(f, closeOnce, finallyClose = true) 31 | 32 | /** Close if there is an exception */ 33 | private[files] def closeIfError[T](f: => T): T = 34 | tryWith(f, closeOnce, finallyClose = false) 35 | 36 | /** Returns a non closing version of this iterator 37 | * This means partial operations like find() and drop() will NOT close the iterator 38 | * 39 | * @param closeInTheEnd If this is true, it will ONLY close the iterator in the end when it has no more elements (default behaviour) 40 | * and not on partial evaluations like find() and take() etc. 41 | * If this is false, iterator will be ALWAYS left open i.e. close() will be NEVER invoked 42 | * and is up to user to close any underlying resources 43 | */ 44 | def nonClosing(closeInTheEnd: Boolean): Iterator[A] 45 | } 46 | 47 | object CloseableIterator { 48 | 49 | /** Make a closeable iterator given an existing iterator and a close function */ 50 | def apply[A](it: Iterator[A], closeFn: () => Unit): CloseableIterator[A] = new CloseableIteratorCompat[A] { self => 51 | override def hasNext = !isClosed() && { 52 | val res = closeIfError(it.hasNext) 53 | if (!res) closeOnce() 54 | res 55 | } 56 | 57 | override def next() = { 58 | if (isClosed()) throw new IllegalStateException("Iterator is already closed") 59 | closeIfError(it.next()) 60 | } 61 | 62 | override def close() = closeFn() 63 | 64 | override def nonClosing(closeInTheEnd: Boolean) = it match { 65 | case c: CloseableIterator[A] => c.nonClosing(closeInTheEnd) 66 | case _ if !closeInTheEnd => it 67 | case _ => 68 | new Iterator[A] { 69 | override def hasNext = self.hasNext 70 | override def next() = self.next() 71 | } 72 | } 73 | } 74 | 75 | def from[A](resource: AutoCloseable)(f: resource.type => Iterator[A]): CloseableIterator[A] = 76 | CloseableIterator(f(resource), resource.close) 77 | } 78 | -------------------------------------------------------------------------------- /src/main/scala/better/files/Dispose.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.util.concurrent.atomic.AtomicBoolean 4 | 5 | import scala.collection.compat._ 6 | import scala.util.Try 7 | 8 | /** A typeclass to denote a disposable resource */ 9 | trait Disposable[-A] { 10 | def dispose(resource: A): Unit 11 | 12 | def disposeSilently(resource: A): Unit = 13 | Try(dispose(resource)) 14 | 15 | } 16 | 17 | object Disposable { 18 | def apply[A](disposeFunction: A => Any): Disposable[A] = 19 | new Disposable[A] { 20 | override def dispose(resource: A) = 21 | disposeFunction(resource) 22 | } 
23 | 24 | def apply[A](disposeMethod: => Unit): Disposable[A] = 25 | Disposable(_ => disposeMethod) 26 | 27 | implicit val closableDisposer: Disposable[AutoCloseable] = 28 | Disposable(_.close()) 29 | 30 | implicit def traversableDisposer[A](implicit disposer: Disposable[A]): Disposable[Iterable[A]] = 31 | Disposable(_.foreach(disposer.dispose)) 32 | 33 | val fileDisposer: Disposable[File] = 34 | Disposable(_.delete(swallowIOExceptions = true)) 35 | } 36 | 37 | /** Given a disposable resource, this actually does the disposing */ 38 | class Dispose[A](private[Dispose] val resource: A)(implicit disposer: Disposable[A]) { 39 | private[Dispose] val disposeOnce = Once(() => disposer.dispose(resource)) 40 | 41 | private[Dispose] def withAdditionalDisposeTask[U](f: => U): Dispose[A] = 42 | new Dispose[A](resource)(Disposable { 43 | try { 44 | disposeOnce() 45 | } finally { 46 | f: Unit 47 | } 48 | }) 49 | 50 | /** Apply f to the resource and return it after closing the resource 51 | * If you don't wish to close the resource (e.g. if you are creating an iterator on file contents), use flatMap instead 52 | */ 53 | def apply[B](f: A => B): B = 54 | tryWith(f(resource), disposeOnce, finallyClose = true) 55 | 56 | /** Dispose this resource and return it 57 | * Note: If you are using map followed by get, consider using apply instead 58 | */ 59 | def get(): A = 60 | apply(identity) 61 | 62 | /** This will immediately apply f on the resource and close the resource */ 63 | def foreach[U](f: A => U): Unit = 64 | apply(f): Unit 65 | 66 | /** This will apply f on the resource while it is open */ 67 | def map[B](f: A => B): Dispose[B] = 68 | new Dispose[B](f(resource))(Disposable(disposeOnce())) 69 | 70 | def withFilter(f: A => Boolean): this.type = { 71 | if (!f(resource)) disposeOnce() 72 | this 73 | } 74 | 75 | /** Generate a self closing iterator from this disposable resource */ 76 | def iterator[B](f: A => Iterator[B]): Iterator[B] = 77 | CloseableIterator(f(resource), disposeOnce) 78 | 79 | /** This keeps the resource open during the context of this flatMap and closes when done */ 80 | def flatMap[B, F[_]](f: A => F[B])(implicit fv: Dispose.FlatMap[F]): fv.Output[B] = 81 | fv.apply(this)(f) 82 | } 83 | 84 | object Dispose { 85 | // TODO: rm this hack once we drop Scala 2.11 (see: https://stackoverflow.com/questions/47598333/) 86 | sealed trait FlatMap[-F[_]] { 87 | type Output[_] 88 | def apply[A, B](a: Dispose[A])(f: A => F[B]): Output[B] 89 | } 90 | 91 | object FlatMap { 92 | trait Implicits { 93 | 94 | /** Compose this managed resource with another managed resource closing the outer one after the inner one */ 95 | implicit object dispose extends FlatMap[Dispose] { 96 | override type Output[X] = Dispose[X] 97 | override def apply[A, B](m: Dispose[A])(f: A => Dispose[B]) = 98 | f(m.resource).withAdditionalDisposeTask(m.disposeOnce()) 99 | } 100 | 101 | /** Use the current managed resource as a generator needed to create another sequence */ 102 | implicit object iterable extends FlatMap[IterableOnce] { 103 | override type Output[X] = Iterator[X] 104 | override def apply[A, B](m: Dispose[A])(f: A => IterableOnce[B]) = 105 | m.iterator(f(_).iterator) 106 | } 107 | } 108 | } 109 | } 110 | 111 | /** Converts a given function to something that can be called only once */ 112 | private[files] case class Once(f: () => Unit) extends (() => Unit) { 113 | private[this] val flag = new AtomicBoolean(false) 114 | 115 | override def apply() = if (!flag.getAndSet(true)) f() 116 | 117 | def isInvoked(): Boolean = 
flag.get() 118 | } 119 | -------------------------------------------------------------------------------- /src/main/scala/better/files/Dsl.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.nio.charset.Charset 4 | import java.nio.file.attribute.{PosixFileAttributes, PosixFilePermission, PosixFilePermissions} 5 | import java.util.zip.Deflater 6 | 7 | import scala.jdk.CollectionConverters._ 8 | 9 | /** Do file ops using a UNIX command line DSL */ 10 | object Dsl { 11 | def ~ : File = 12 | File.home 13 | 14 | def pwd: File = 15 | File.currentWorkingDirectory 16 | 17 | def cwd: File = 18 | pwd 19 | 20 | val `..` : File => File = 21 | _.parent 22 | 23 | val `.` : File => File = 24 | identity 25 | 26 | /** Adds some symbolic operations to file */ 27 | implicit class SymbolicOperations(val file: File) { 28 | 29 | /** Allows navigation up e.g. file / .. / .. */ 30 | def /(f: File => File): File = 31 | f(file) 32 | 33 | def <<(line: String)(implicit charset: Charset = DefaultCharset): file.type = 34 | file.appendLine(line, charset) 35 | 36 | def >>:(line: String)(implicit charset: Charset = DefaultCharset): file.type = 37 | file.appendLine(line, charset) 38 | 39 | def <( 40 | text: String, 41 | openOptions: File.OpenOptions = File.OpenOptions.default, 42 | charset: Charset = DefaultCharset 43 | ): file.type = 44 | file.write(text, openOptions, charset) 45 | 46 | def `>:`( 47 | text: String, 48 | openOptions: File.OpenOptions = File.OpenOptions.default, 49 | charset: Charset = DefaultCharset 50 | ): file.type = 51 | file.write(text, openOptions, charset) 52 | 53 | def `!`(implicit charset: Charset = DefaultCharset): String = 54 | file.contentAsString(charset) 55 | 56 | def `===`(that: File): Boolean = 57 | file.isSameContentAs(that) 58 | 59 | def !==(that: File): Boolean = 60 | !(file === that) 61 | } 62 | 63 | def cp(from: File, to: File): File = { 64 | if (to.isDirectory()) { 65 | from.copyToDirectory(to) 66 | } else { 67 | from.copyTo(to, overwrite = true) 68 | } 69 | } 70 | 71 | def mv(from: File, to: File): File = { 72 | if (to.isDirectory()) { 73 | from.moveToDirectory(to) 74 | } else { 75 | from.moveTo(to, File.CopyOptions(overwrite = true)) 76 | } 77 | } 78 | 79 | def rm(file: File): File = 80 | file.delete(swallowIOExceptions = true) 81 | 82 | def del(file: File): File = 83 | rm(file) 84 | 85 | def ln(file1: File, file2: File): File = 86 | file1.linkTo(file2) 87 | 88 | def ln_s(file1: File, file2: File): File = 89 | file1.symbolicLinkTo(file2) 90 | 91 | def cat(files: File*): Seq[Iterator[Byte]] = 92 | files.map(_.bytes) 93 | 94 | def ls(file: File): Iterator[File] = 95 | file.list 96 | 97 | def dir(file: File): Iterator[File] = 98 | ls(file) 99 | 100 | def ls_r(file: File): Iterator[File] = 101 | file.listRecursively() 102 | 103 | def touch(file: File): File = 104 | file.touch() 105 | 106 | def mkdir(file: File): File = 107 | file.createDirectory() 108 | 109 | def md5(file: File): String = 110 | file.md5() 111 | 112 | def sha1(file: File): String = 113 | file.sha1() 114 | 115 | def sha256(file: File): String = 116 | file.sha256() 117 | 118 | def sha512(file: File): String = 119 | file.sha512() 120 | 121 | def mkdirs(file: File): File = 122 | file.createDirectories() 123 | 124 | def chown(owner: String, file: File): File = 125 | file.setOwner(owner) 126 | 127 | def chgrp(group: String, file: File): File = 128 | file.setGroup(group) 129 | 130 | /** Update permission of this file 131 | * 132 | * @param 
permissions Must be 9 character POSIX permission representation e.g. "rwxr-x---" 133 | */ 134 | def chmod(permissions: String, file: File): File = 135 | file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet) 136 | 137 | def chmod_+(permission: PosixFilePermission, file: File): File = 138 | file.addPermission(permission) 139 | 140 | def chmod_-(permission: PosixFilePermission, file: File): File = 141 | file.removePermission(permission) 142 | 143 | def stat(file: File): PosixFileAttributes = 144 | file.posixAttributes() 145 | 146 | def unzip(zipFile: File)(destination: File, charset: Charset = DefaultCharset): File = // TODO: return destination.type here? 147 | zipFile.unzipTo(destination, charset = charset) 148 | 149 | def zip( 150 | files: File* 151 | )(destination: File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION, charset: Charset = DefaultCharset): destination.type = 152 | destination.zipIn(files.iterator, compressionLevel, charset) 153 | 154 | def ungzip(gzipFile: File)(destination: File): File = 155 | gzipFile.unGzipTo(destination) 156 | 157 | def gzip(file: File)(destination: File): File = 158 | file.gzipTo(destination) 159 | } 160 | -------------------------------------------------------------------------------- /src/main/scala/better/files/FileMonitor.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.nio.file._ 4 | 5 | import scala.concurrent.{blocking, ExecutionContext} 6 | import scala.util.Try 7 | import scala.util.control.NonFatal 8 | 9 | /** Implementation of File.Monitor */ 10 | abstract class FileMonitor(val root: File, maxDepth: Int) extends File.Monitor { 11 | protected[this] val service = root.newWatchService 12 | 13 | def this(root: File, recursive: Boolean = true) = this(root, if (recursive) Int.MaxValue else 0) 14 | 15 | /** If watching non-directory, don't react to siblings */ 16 | protected[this] def reactTo(target: File) = root.isDirectory() || root.isSamePathAs(target) 17 | 18 | protected[this] def process(key: WatchKey) = { 19 | val path = key.watchable().asInstanceOf[Path] 20 | 21 | import scala.jdk.CollectionConverters._ 22 | key.pollEvents().asScala foreach { 23 | case event: WatchEvent[Path] @unchecked if (event.context() != null) => 24 | val target: File = path.resolve(event.context()) 25 | if (reactTo(target)) { 26 | if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE) { 27 | val depth = root.relativize(target).getNameCount 28 | watch(target, (maxDepth - depth) max 0) // auto-watch new files in a directory 29 | } 30 | onEvent(event.kind(), target, event.count()) 31 | } 32 | case event => if (reactTo(path)) onUnknownEvent(event) 33 | } 34 | key.reset() 35 | } 36 | 37 | protected[this] def watch(file: File, depth: Int): Unit = { 38 | def toWatch: Iterator[File] = 39 | if (file.isDirectory()) { 40 | file.walk(depth).filter(f => f.isDirectory() && f.exists()) 41 | } else { 42 | when(file.exists())(file.parent).iterator // There is no way to watch a regular file; so watch its parent instead 43 | } 44 | try { 45 | toWatch.foreach(f => Try[Unit](f.register(service)).recover { case e => onException(e) }.get) 46 | } catch { 47 | case NonFatal(e) => onException(e) 48 | } 49 | } 50 | 51 | override def start()(implicit executionContext: ExecutionContext) = { 52 | watch(root, maxDepth) 53 | executionContext.execute(new Runnable { 54 | override def run() = blocking { Iterator.continually(service.take()).foreach(process) } 55 | }) 56 | } 57 | 58 | 
override def close() = service.close() 59 | 60 | // Although this class is abstract, we provide noop implementations so user can choose to implement a subset of these 61 | override def onCreate(file: File, count: Int) = {} 62 | override def onModify(file: File, count: Int) = {} 63 | override def onDelete(file: File, count: Int) = {} 64 | override def onUnknownEvent(event: WatchEvent[_]) = {} 65 | override def onException(exception: Throwable) = {} 66 | } 67 | -------------------------------------------------------------------------------- /src/main/scala/better/files/Implicits.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.{File => JFile, _} 4 | import java.net.{URI, URL} 5 | import java.nio.MappedByteBuffer 6 | import java.nio.channels.FileChannel 7 | import java.nio.charset.Charset 8 | import java.nio.file.{Path, PathMatcher} 9 | import java.security.{DigestInputStream, DigestOutputStream, MessageDigest} 10 | import java.util.StringTokenizer 11 | import java.util.stream.{Stream => JStream} 12 | import java.util.zip._ 13 | 14 | import scala.annotation.tailrec 15 | import scala.collection.compat._ 16 | import scala.jdk.CollectionConverters._ 17 | 18 | /** Container for various implicits */ 19 | trait Implicits extends Dispose.FlatMap.Implicits with Scanner.Read.Implicits with Scanner.Source.Implicits { 20 | 21 | implicit class StringInterpolations(sc: StringContext) { 22 | def file(args: Any*): File = 23 | value(args).toFile 24 | 25 | private[this] def value(args: Seq[Any]) = 26 | sc.s(args: _*) 27 | } 28 | 29 | implicit class StringExtensions(str: String) { 30 | def toFile: File = 31 | File(str) 32 | 33 | def /(child: String): File = 34 | toFile / child 35 | 36 | def inputStream(charset: Charset = DefaultCharset): InputStream = 37 | new ByteArrayInputStream(str.getBytes(charset)) 38 | 39 | def reader: Reader = 40 | new StringReader(str) 41 | } 42 | 43 | implicit class FileExtensions(file: JFile) { 44 | def toScala: File = 45 | File(file.getPath) 46 | } 47 | 48 | implicit class URLExtensions(url: URL) { 49 | def isFile: Boolean = 50 | url != null && url.toURI.isFile 51 | 52 | def toFileUnsafe: File = 53 | toFile.getOrElse(throw new IllegalArgumentException(s"Not a file: $url")) 54 | 55 | def toFile: Option[File] = 56 | when(isFile)(File(url)) 57 | } 58 | 59 | implicit class URIExtensions(uri: URI) { 60 | def isFile: Boolean = 61 | uri != null && uri.getScheme() == "file" 62 | 63 | def toFileUnsafe: File = 64 | toFile.getOrElse(throw new IllegalArgumentException(s"Not a file: $uri")) 65 | 66 | def toFile: Option[File] = 67 | when(isFile)(File(uri)) 68 | } 69 | 70 | implicit class SymbolExtensions(symbol: Symbol) { 71 | def /(child: Symbol): File = 72 | File(symbol.name) / child 73 | } 74 | 75 | implicit class IteratorExtensions[A](it: Iterator[A]) { 76 | def withHasNext(f: => Boolean): Iterator[A] = 77 | new Iterator[A] { 78 | override def hasNext = f && it.hasNext 79 | override def next() = it.next() 80 | } 81 | 82 | /** Returns a non closing version of this iterator 83 | * This means partial operations like find() and drop() will NOT close the iterator 84 | * This was the behaviour prior to v4.0.0 - see: https://github.com/pathikrit/better-files/pull/587 85 | * @param closeInTheEnd If this is true, it will ONLY close the iterator in the end when it has no more elements (default behaviour) 86 | * and not on partial evaluations like find() and take() 87 | * If this is false, iterator will be left open 
EVEN when it has no more elements 88 | */ 89 | def nonClosing(closeInTheEnd: Boolean = true): Iterator[A] = it match { 90 | case c: CloseableIterator[A] => c.nonClosing(closeInTheEnd) 91 | case _ => it 92 | } 93 | } 94 | 95 | implicit class InputStreamExtensions(in: InputStream) { 96 | def pipeTo(out: OutputStream, bufferSize: Int = DefaultBufferSize): out.type = 97 | pipeTo(out, Array.ofDim[Byte](bufferSize)) 98 | 99 | /** Pipe an input stream to an output stream using a byte buffer */ 100 | @tailrec final def pipeTo(out: OutputStream, buffer: Array[Byte]): out.type = { 101 | val n = in.read(buffer) 102 | if (n > 0) { 103 | out.write(buffer, 0, n) 104 | pipeTo(out, buffer) 105 | } else { 106 | out 107 | } 108 | } 109 | 110 | def asString( 111 | closeStream: Boolean = true, 112 | bufferSize: Int = DefaultBufferSize, 113 | charset: Charset = DefaultCharset 114 | ): String = { 115 | try { 116 | new ByteArrayOutputStream(bufferSize).autoClosed 117 | .apply(pipeTo(_, bufferSize = bufferSize).toString(charset.displayName())) 118 | } finally { 119 | if (closeStream) in.close() 120 | } 121 | } 122 | 123 | def withMessageDigest(digest: MessageDigest): DigestInputStream = 124 | new DigestInputStream(in, digest) 125 | 126 | def md5: DigestInputStream = 127 | withMessageDigest("MD5") 128 | 129 | def sha1: DigestInputStream = 130 | withMessageDigest("SHA-1") 131 | 132 | def sha256: DigestInputStream = 133 | withMessageDigest("SHA-256") 134 | 135 | def sha512: DigestInputStream = 136 | withMessageDigest("SHA-512") 137 | 138 | def crc32: CheckedInputStream = 139 | withChecksum(new CRC32) 140 | 141 | def adler32: CheckedInputStream = 142 | withChecksum(new Adler32) 143 | 144 | def withChecksum(checksum: Checksum): CheckedInputStream = 145 | new CheckedInputStream(in, checksum) 146 | 147 | def buffered: BufferedInputStream = 148 | new BufferedInputStream(in) 149 | 150 | def buffered(bufferSize: Int): BufferedInputStream = 151 | new BufferedInputStream(in, bufferSize) 152 | 153 | def asGzipInputStream(bufferSize: Int = DefaultBufferSize): GZIPInputStream = 154 | new GZIPInputStream(in, bufferSize) 155 | 156 | def asZipInputStream(charset: Charset = DefaultCharset): ZipInputStream = 157 | new ZipInputStream(in, charset) 158 | 159 | /** If bufferSize is set to less than or equal to 0, we don't buffer */ 160 | def asObjectInputStream(bufferSize: Int = DefaultBufferSize): ObjectInputStream = 161 | new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) 162 | 163 | /** @param bufferSize If bufferSize is set to less than or equal to 0, we don't buffer 164 | * Code adapted from: 165 | * https://github.com/apache/commons-io/blob/master/src/main/java/org/apache/commons/io/input/ClassLoaderObjectInputStream.java 166 | * 167 | * @return A special ObjectInputStream that loads a class based on a specified ClassLoader rather than the default 168 | * This is useful in dynamic container environments. 
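      *
      * Hypothetical usage sketch (added for illustration; `someInputStream`, `pluginClassLoader` and
      * `MyEvent` are placeholders, not part of this library):
      * {{{
      *   val event = someInputStream.asObjectInputStreamUsingClassLoader(pluginClassLoader).deserialize[MyEvent]
      * }}}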
169 | */ 170 | def asObjectInputStreamUsingClassLoader( 171 | classLoader: ClassLoader = getClass.getClassLoader, 172 | bufferSize: Int = DefaultBufferSize 173 | ): ObjectInputStream = 174 | new ObjectInputStream(if (bufferSize <= 0) in else buffered(bufferSize)) { 175 | override protected def resolveClass(objectStreamClass: ObjectStreamClass): Class[_] = 176 | try { 177 | Class.forName(objectStreamClass.getName, false, classLoader) 178 | } catch { 179 | case _: ClassNotFoundException => super.resolveClass(objectStreamClass) 180 | } 181 | } 182 | 183 | def reader(charset: Charset = DefaultCharset): InputStreamReader = 184 | new InputStreamReader(in, charset) 185 | 186 | def lines(charset: Charset = DefaultCharset): Iterator[String] = 187 | reader(charset).buffered.lines().toAutoClosedIterator 188 | 189 | def bytes: Iterator[Byte] = 190 | in.autoClosed.iterator(res => eofReader(res.read()).map(_.toByte)) 191 | 192 | def byteArray: Array[Byte] = { 193 | for { 194 | _ <- in.autoClosed 195 | out <- new ByteArrayOutputStream().autoClosed 196 | } yield pipeTo(out).toByteArray 197 | }.get() 198 | } 199 | 200 | implicit class DigestInputStreamExtensions(in: DigestInputStream) { 201 | 202 | /** Exhausts the stream and computes the digest and closes the stream */ 203 | def digest(drainTo: OutputStream = NullOutputStream): Array[Byte] = { 204 | in.autoClosed.foreach(_.pipeTo(drainTo)) 205 | in.getMessageDigest.digest() 206 | } 207 | 208 | /** Exhausts the stream and computes the digest as hex and closes the stream */ 209 | def hexDigest(drainTo: OutputStream = NullOutputStream): String = 210 | toHex(digest(drainTo)) 211 | } 212 | 213 | implicit class OutputStreamExtensions(val out: OutputStream) { 214 | def buffered: BufferedOutputStream = 215 | new BufferedOutputStream(out) 216 | 217 | def buffered(bufferSize: Int): BufferedOutputStream = 218 | new BufferedOutputStream(out, bufferSize) 219 | 220 | def asGzipOutputStream(bufferSize: Int = DefaultBufferSize, syncFlush: Boolean = false): GZIPOutputStream = 221 | new GZIPOutputStream(out, bufferSize, syncFlush) 222 | 223 | def withMessageDigest(digest: MessageDigest): DigestOutputStream = 224 | new DigestOutputStream(out, digest) 225 | 226 | def withChecksum(checksum: Checksum): CheckedOutputStream = 227 | new CheckedOutputStream(out, checksum) 228 | 229 | def writer(charset: Charset = DefaultCharset): OutputStreamWriter = 230 | new OutputStreamWriter(out, charset) 231 | 232 | def printWriter(autoFlush: Boolean = false): PrintWriter = 233 | new PrintWriter(out, autoFlush) 234 | 235 | def write(bytes: Iterator[Byte], bufferSize: Int = DefaultBufferSize): out.type = { 236 | bytes.grouped(bufferSize).foreach(buffer => out.write(buffer.toArray)) 237 | out.flush() 238 | out 239 | } 240 | 241 | def md5: DigestOutputStream = 242 | withMessageDigest("MD5") 243 | 244 | def sha1: DigestOutputStream = 245 | withMessageDigest("SHA-1") 246 | 247 | def sha256: DigestOutputStream = 248 | withMessageDigest("SHA-256") 249 | 250 | def sha512: DigestOutputStream = 251 | withMessageDigest("SHA-512") 252 | 253 | def crc32: CheckedOutputStream = 254 | withChecksum(new CRC32) 255 | 256 | def adler32: CheckedOutputStream = 257 | withChecksum(new Adler32) 258 | 259 | def writeAndClose(str: String, charset: Charset = DefaultCharset): Unit = 260 | out.writer(charset).autoClosed.foreach(_.write(str)) 261 | 262 | def tee(out2: OutputStream): OutputStream = 263 | new TeeOutputStream(out, out2) 264 | 265 | /** If bufferSize is set to less than or equal to 0, we don't buffer */ 266 | 
def asObjectOutputStream(bufferSize: Int = DefaultBufferSize): ObjectOutputStream = 267 | new ObjectOutputStream(if (bufferSize <= 0) out else buffered(bufferSize)) 268 | 269 | def asZipOutputStream(charset: Charset): ZipOutputStream = 270 | new ZipOutputStream(out, charset) 271 | } 272 | 273 | implicit class PrintWriterExtensions(pw: PrintWriter) { 274 | def printLines(lines: IterableOnce[_]): PrintWriter = { 275 | lines.iterator.foreach(pw.println) 276 | pw 277 | } 278 | } 279 | 280 | implicit class ReaderExtensions(reader: Reader) { 281 | def buffered: BufferedReader = 282 | new BufferedReader(reader) 283 | 284 | def toInputStream(bufferSize: Int = DefaultBufferSize, charset: Charset = DefaultCharset): InputStream = 285 | new ReaderInputStream(reader, bufferSize, charset) 286 | 287 | def chars: Iterator[Char] = 288 | reader.autoClosed.iterator(res => eofReader(res.read()).map(_.toChar)) 289 | } 290 | 291 | implicit class BufferedReaderExtensions(reader: BufferedReader) { 292 | def tokens(splitter: StringSplitter = StringSplitter.Default): Iterator[String] = 293 | reader.lines().toAutoClosedIterator.flatMap(splitter.split) 294 | } 295 | 296 | implicit class WriterExtensions(writer: Writer) { 297 | def buffered: BufferedWriter = 298 | new BufferedWriter(writer) 299 | 300 | def outputstream( 301 | bufferSize: Int = DefaultBufferSize, 302 | flushImmediately: Boolean = false, 303 | charset: Charset = DefaultCharset 304 | ): OutputStream = 305 | new WriterOutputStream(writer, bufferSize, flushImmediately, charset) 306 | } 307 | 308 | implicit class FileChannelExtensions(fc: FileChannel) { 309 | def toMappedByteBuffer: MappedByteBuffer = 310 | fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size()) 311 | } 312 | 313 | implicit class PathMatcherExtensions(matcher: PathMatcher) { 314 | def matches(file: File, maxDepth: Int, visitOptions: File.VisitOptions = File.VisitOptions.default): Iterator[File] = 315 | file.collectChildren(child => matcher.matches(child.path), maxDepth, visitOptions) 316 | } 317 | 318 | implicit class ObjectInputStreamExtensions(ois: ObjectInputStream) { 319 | def deserialize[A]: A = 320 | ois.readObject().asInstanceOf[A] 321 | } 322 | 323 | implicit class ObjectOutputStreamExtensions(val oos: ObjectOutputStream) { 324 | def serialize(obj: Serializable): oos.type = { 325 | oos.writeObject(obj) 326 | oos 327 | } 328 | } 329 | 330 | implicit class ZipOutputStreamExtensions(val out: ZipOutputStream) { 331 | 332 | /** Correctly set the compression level 333 | * See: http://stackoverflow.com/questions/1206970/creating-zip-using-zip-utility 334 | */ 335 | def withCompressionLevel(level: Int): out.type = { 336 | out.setLevel(level) 337 | if (level == Deflater.NO_COMPRESSION) out.setMethod(ZipOutputStream.DEFLATED) 338 | out 339 | } 340 | 341 | def add(file: File, name: String): out.type = { 342 | val relativeName = name.stripSuffix(file.fileSystem.getSeparator) 343 | val entryName = 344 | if (file.isDirectory()) s"$relativeName/" else relativeName // make sure to end directories in ZipEntry with "/" 345 | out.putNextEntry(new ZipEntry(entryName)) 346 | if (file.isRegularFile()) file.inputStream().foreach(_.pipeTo(out)) 347 | out.closeEntry() 348 | out 349 | } 350 | 351 | def +=(file: File): out.type = 352 | add(file, file.name) 353 | } 354 | 355 | implicit class ZipInputStreamExtensions(val in: ZipInputStream) { 356 | 357 | /** Apply `f` on each ZipEntry in the archive, closing the entry after `f` has been applied. 358 | * 359 | * @param f The function to apply to each ZipEntry. 
Can fail if it returns a lazy value, 360 | * like Iterator, as the entry will have been closed before the lazy value is evaluated. 361 | */ 362 | def mapEntries[A](f: ZipEntry => A): Iterator[A] = 363 | new Iterator[A] { 364 | private[this] var entry = in.getNextEntry 365 | 366 | override def hasNext = entry != null 367 | 368 | override def next() = { 369 | try { 370 | f(entry) 371 | } finally { 372 | try { 373 | in.closeEntry() 374 | } finally { 375 | entry = in.getNextEntry 376 | } 377 | } 378 | } 379 | } 380 | 381 | /** Apply `f` to the ZipInputStream for every entry in the archive. 382 | * @param f The function to apply to the ZipInputStream. Can fail if it returns a lazy value, 383 | * like Iterator, as the the entry will have been closed before the lazy value is evaluated. 384 | */ 385 | def foldMap[A](f: ZipInputStream => A): Iterator[A] = 386 | mapEntries(_ => f(in)) 387 | } 388 | 389 | implicit class ZipEntryExtensions(val entry: ZipEntry) { 390 | 391 | /** Extract this ZipEntry under this rootDir 392 | * 393 | * @param rootDir directory under which this entry is extracted 394 | * @param inputStream use this inputStream when this entry is a file 395 | * @return the extracted file 396 | */ 397 | def extractTo(rootDir: File, inputStream: => InputStream): File = { 398 | val entryName = entry.getName.replace("\\", "/") // see https://github.com/pathikrit/better-files/issues/262 399 | val child = rootDir.createChild(entryName, asDirectory = entry.isDirectory, createParents = true) 400 | if (!entry.isDirectory) child.outputStream().foreach(inputStream.pipeTo(_)) 401 | child 402 | } 403 | } 404 | 405 | implicit class DisposeableExtensions[A: Disposable](resource: A) { 406 | 407 | /** Lightweight automatic resource management 408 | * Closes the resource when done e.g. 409 | *
410 |       * for {
411 |       * out <- file.newOutputStream.autoClosed
412 |       * } out.write(bytes)
413 |       * // out is closed now
414 |       * 
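      *
      * A further illustrative sketch (not in the original doc) composing two auto-closed resources;
      * `file1` and `file2` are placeholder File values:
      * for {
      *   in  <- file1.newInputStream.autoClosed
      *   out <- file2.newOutputStream.autoClosed
      * } in.pipeTo(out)
      * // both streams are closed once the for-comprehension completes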
415 | * 416 | * @return 417 | */ 418 | def autoClosed: Dispose[A] = 419 | new Dispose(resource) 420 | } 421 | 422 | implicit class JStreamExtensions[A](stream: JStream[A]) { 423 | 424 | /** Convert this stream to a CloseableIterator @see CloseableIterator */ 425 | def toAutoClosedIterator: Iterator[A] = 426 | CloseableIterator.from(stream)(_.iterator().asScala) 427 | } 428 | 429 | private[files] implicit class OrderingExtensions[A](order: Ordering[A]) { 430 | def andThenBy(order2: Ordering[A]): Ordering[A] = 431 | Ordering.comparatorToOrdering(order.thenComparing(order2)) 432 | } 433 | 434 | implicit def stringToMessageDigest(algorithmName: String): MessageDigest = 435 | MessageDigest.getInstance(algorithmName) 436 | 437 | implicit def stringToCharset(charsetName: String): Charset = 438 | Charset.forName(charsetName) 439 | 440 | implicit def tokenizerToIterator(s: StringTokenizer): Iterator[String] = 441 | Iterator.continually(s.nextToken()).withHasNext(s.hasMoreTokens) 442 | 443 | // implicit def posixPermissionToFileAttribute(perm: PosixFilePermission) = 444 | // PosixFilePermissions.asFileAttribute(Set(perm)) 445 | 446 | private[files] implicit def pathStreamToFiles(files: JStream[Path]): Iterator[File] = 447 | files.toAutoClosedIterator.map(File.apply) 448 | } 449 | -------------------------------------------------------------------------------- /src/main/scala/better/files/ReaderInputStream.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.{InputStream, Reader} 4 | import java.nio.{ByteBuffer, CharBuffer} 5 | import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction} 6 | 7 | import scala.annotation.tailrec 8 | 9 | /** Code ported from Java to Scala: 10 | * https://github.com/apache/commons-io/blob/c0eb48f7e83987c5ed112b82f0d651aff5149ae4/src/main/java/org/apache/commons/io/input/ReaderInputStream.java 11 | */ 12 | class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int) extends InputStream { 13 | 14 | def this(reader: Reader, bufferSize: Int = DefaultBufferSize, charset: Charset = DefaultCharset) = 15 | this( 16 | reader = reader, 17 | encoder = charset.newEncoder.onMalformedInput(CodingErrorAction.REPLACE).onUnmappableCharacter(CodingErrorAction.REPLACE), 18 | bufferSize = bufferSize 19 | ) 20 | 21 | /** CharBuffer used as input for the decoder. It should be reasonably 22 | * large as we read data from the underlying Reader into this buffer. 23 | */ 24 | private[this] val encoderIn = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer] 25 | 26 | /** ByteBuffer used as output for the decoder. This buffer can be small 27 | * as it is only used to transfer data from the decoder to the buffer provided by the caller. 
28 | */ 29 | private[this] val encoderOut = ByteBuffer.allocate(bufferSize >> 4).flip().asInstanceOf[ByteBuffer] 30 | 31 | private[this] var lastCoderResult = CoderResult.UNDERFLOW 32 | private[this] var endOfInput = false 33 | 34 | private[this] def fillBuffer(): Unit = { 35 | assert(!endOfInput) 36 | if (lastCoderResult.isUnderflow) { 37 | val position = encoderIn.compact().position() 38 | // We don't use Reader#read(CharBuffer) here because it is more efficient to write directly to the underlying char array 39 | // since the default implementation copies data to a temporary char array anyway 40 | reader.read(encoderIn.array, position, encoderIn.remaining) match { 41 | case EOF => endOfInput = true 42 | case c => encoderIn.position(position + c) 43 | } 44 | encoderIn.flip() 45 | } 46 | lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput) 47 | encoderOut.flip() 48 | } 49 | 50 | override def read(b: Array[Byte], off: Int, len: Int) = { 51 | if (len < 0 || off < 0 || (off + len) > b.length) 52 | throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len) 53 | if (len == 0) { 54 | 0 // Always return 0 if len == 0 55 | } else { 56 | var read = 0 57 | @tailrec def loop(off: Int, len: Int): Unit = 58 | if (len > 0) { 59 | if (encoderOut.hasRemaining) { 60 | val c = encoderOut.remaining min len 61 | encoderOut.get(b, off, c) 62 | read += c 63 | loop(off + c, len - c) 64 | } else if (!endOfInput) { 65 | fillBuffer() 66 | loop(off, len) 67 | } 68 | } 69 | loop(off, len) 70 | if (read == 0 && endOfInput) EOF else read 71 | } 72 | } 73 | 74 | @tailrec final override def read() = { 75 | if (encoderOut.hasRemaining) { 76 | encoderOut.get & 0xff 77 | } else if (endOfInput) { 78 | EOF 79 | } else { 80 | fillBuffer() 81 | read() 82 | } 83 | } 84 | 85 | override def close() = reader.close() 86 | } 87 | -------------------------------------------------------------------------------- /src/main/scala/better/files/Resource.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.{InputStream, IOException} 4 | import java.net.URL 5 | import java.nio.charset.Charset 6 | 7 | /** Finds and loads [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) class resources]] 8 | * or [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResource(java.lang.String) class loader resources]]. 9 | * 10 | * The default implementation of this trait is the [[Resource]] object, which looks up resources 11 | * using the [[https://docs.oracle.com/javase/10/docs/api/java/lang/Thread.html#currentThread() current thread]]'s [[https://docs.oracle.com/javase/10/docs/api/java/lang/Thread.html#getContextClassLoader() context class loader]]. 12 | * The Resource object also offers several other Resource implementations, 13 | * through its methods `at`, `from`, and `my`. `at` searches from a [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html Class]], 14 | * `from` searches from a [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html ClassLoader]], 15 | * and `my` searches from the class, trait, or object surrounding the call. 16 | * 17 | * @example {{{ 18 | * // Look up the config.properties file for this class or object. 19 | * Resource.my.asStream("config.properties") 20 | * 21 | * // Find logging.properties (in the root package) somewhere on the classpath. 
22 | * Resource.url("logging.properties") 23 | * }}} 24 | * 25 | * @see [[Resource]] 26 | * @see [[https://stackoverflow.com/questions/676250/different-ways-of-loading-a-file-as-an-inputstream Different ways of loading a file as an InputStream]] 27 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 28 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResource(java.lang.String) ClassLoader#getResource]] 29 | */ 30 | trait Resource { 31 | 32 | /** Look up a resource by name, and open an [[https://docs.oracle.com/javase/10/docs/api/java/io/InputStream.html InputStream]] for reading it. 33 | * 34 | * @param name Name of the resource to search for. 35 | * @return InputStream for reading the found resource, if a resource was found. 36 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResourceAsStream(java.lang.String) Class#getResourceAsStream]] 37 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResourceAsStream(java.lang.String) ClassLoader#getResourceAsStream]] 38 | */ 39 | @throws[IOException] 40 | def asStream(name: String): Option[InputStream] = 41 | url(name).map(_.openStream()) 42 | 43 | /** Same as asStream but throws a NoSuchElementException if resource is not found 44 | */ 45 | def getAsStream(name: String): InputStream = 46 | asStream(name).getOrElse(Resource.notFound(name)) 47 | 48 | def asString( 49 | name: String, 50 | bufferSize: Int = DefaultBufferSize, 51 | charset: Charset = DefaultCharset 52 | ): Option[String] = 53 | asStream(name).map(_.asString(bufferSize = bufferSize, charset = charset)) 54 | 55 | def getAsString( 56 | name: String, 57 | bufferSize: Int = DefaultBufferSize, 58 | charset: Charset = DefaultCharset 59 | ): String = 60 | asString(name, bufferSize, charset).getOrElse(Resource.notFound(name)) 61 | 62 | /** Look up a resource by name, and get its [[https://docs.oracle.com/javase/10/docs/api/java/net/URL.html URL]]. 63 | * 64 | * @param name Name of the resource to search for. 65 | * @return URL of the requested resource. If the resource could not be found or is not accessible, returns None. 66 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 67 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResource(java.lang.String) ClassLoader#getResource]] 68 | */ 69 | def url(name: String): Option[URL] 70 | 71 | /** Get URL of given resource 72 | * A default argument of empty string is provided to conveniently get the root resource URL using {{Resource.getUrl()}} 73 | * 74 | * @param name 75 | * @return 76 | */ 77 | def getUrl(name: String = ""): URL = 78 | url(name).getOrElse(Resource.notFound(name)) 79 | } 80 | 81 | /** Implementations of [[Resource]]. 82 | * 83 | * This object itself is a Resource uses the [[https://docs.oracle.com/javase/10/docs/api/java/lang/Thread.html#currentThread() current thread]]'s 84 | * [[https://docs.oracle.com/javase/10/docs/api/java/lang/Thread.html#getContextClassLoader() context class loader]]. 85 | * It also creates Resources with different lookup behavior, using the methods `at`, `from`, and `my`. 
`at` searches 86 | * rom a [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html Class]], `from` searches 87 | * from a different [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html ClassLoader]], 88 | * and `my` searches from the class, trait, or object surrounding the call. 89 | * 90 | * @see [[Resource]] 91 | * @see [[https://stackoverflow.com/questions/676250/different-ways-of-loading-a-file-as-an-inputstream Different ways of loading a file as an InputStream]] 92 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/Class.html#getResource(java.lang.String) Class#getResource]] 93 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResource(java.lang.String) ClassLoader#getResource]] 94 | */ 95 | object Resource extends Resource with ResourceCompat { 96 | 97 | @throws[NoSuchElementException] 98 | def notFound(name: String): Nothing = 99 | throw new NoSuchElementException(s"Could not find resource=${name}") 100 | 101 | override def url(name: String): Option[URL] = 102 | from(Thread.currentThread.getContextClassLoader).url(name) 103 | 104 | /** Look up resource files using the specified ClassLoader. 105 | * 106 | * This Resource looks up resources from a specific ClassLoader. Like [[Resource the default Resource]], resource names are relative to the root package. 107 | * 108 | * @example {{{ Resource.from(appClassLoader).url("com/example/config.properties") }}} 109 | * @param cl ClassLoader to look up resources from. 110 | * @return A Resource that uses the supplied ClassLoader. 111 | * @see [[https://docs.oracle.com/javase/10/docs/api/java/lang/ClassLoader.html#getResource(java.lang.String) ClassLoader#getResource]] 112 | */ 113 | def from(cl: ClassLoader): Resource = 114 | new Resource { 115 | override def url(name: String): Option[URL] = 116 | Option(cl.getResource(name)) 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/main/scala/better/files/Scanner.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io._ 4 | import java.nio.charset.Charset 5 | import java.time.format.DateTimeFormatter 6 | import java.util.StringTokenizer 7 | 8 | import scala.collection.compat.IterableOnce 9 | 10 | trait Scanner extends Iterator[String] with AutoCloseable { 11 | def lineNumber(): Int 12 | 13 | def next[A](implicit scan: Scannable[A]): A = scan(this) 14 | 15 | def nextLine(): String 16 | 17 | def lines: Iterator[String] = Iterator.continually(nextLine()).withHasNext(hasNext) 18 | } 19 | 20 | /** Faster, safer and more idiomatic Scala replacement for java.util.Scanner 21 | * See: http://codeforces.com/blog/entry/7018 22 | */ 23 | object Scanner { 24 | val stdin: Scanner = Scanner(System.in) 25 | 26 | trait Source[A] { self => 27 | def apply(a: A): LineNumberReader 28 | def contramap[B](f: B => A): Source[B] = 29 | new Source[B] { 30 | override def apply(b: B) = self.apply(f(b)) 31 | } 32 | } 33 | 34 | object Source { 35 | def apply[A](f: A => LineNumberReader): Source[A] = 36 | new Source[A] { 37 | override def apply(a: A) = f(a) 38 | } 39 | 40 | trait Implicits { 41 | implicit val lineNumberReaderSource: Source[LineNumberReader] = Source(identity) 42 | implicit val bufferedReaderSource: Source[BufferedReader] = 43 | lineNumberReaderSource.contramap(new LineNumberReader(_)) 44 | implicit val readerSource: Source[Reader] = bufferedReaderSource.contramap(_.buffered) 45 | implicit val 
stringSource: Source[String] = readerSource.contramap(new StringReader(_)) 46 | 47 | implicit def inputstreamSource(implicit charset: Charset = DefaultCharset): Source[InputStream] = 48 | readerSource.contramap(_.reader(charset)) 49 | } 50 | } 51 | 52 | def apply[A: Source](a: A, splitter: StringSplitter = StringSplitter.Default): Scanner = 53 | new Scanner { 54 | private[this] val reader = implicitly[Source[A]].apply(a) 55 | private[this] val tokens = reader.tokens(splitter) 56 | override def lineNumber() = reader.getLineNumber 57 | override def nextLine() = Option(reader.readLine()).getOrElse(throw new NoSuchElementException("End of file")) 58 | override def next() = tokens.next() 59 | override def hasNext = tokens.hasNext 60 | override def close() = reader.close() 61 | } 62 | 63 | trait Read[A] { 64 | def apply(s: String): A 65 | } 66 | 67 | object Read { 68 | def apply[A](f: String => A): Read[A] = 69 | new Read[A] { 70 | override def apply(s: String) = f(s) 71 | } 72 | 73 | trait Implicits { 74 | implicit val stringRead: Read[String] = Read(identity) 75 | implicit val booleanRead: Read[Boolean] = Read(_.toBoolean) 76 | implicit val byteRead: Read[Byte] = Read(_.toByte) 77 | implicit val shortRead: Read[Short] = Read(_.toShort) 78 | implicit val intRead: Read[Int] = Read(_.toInt) 79 | implicit val longRead: Read[Long] = Read(_.toLong) 80 | implicit val bigIntRead: Read[BigInt] = Read(BigInt(_)) 81 | implicit val floatRead: Read[Float] = Read(_.toFloat) 82 | implicit val doubleRead: Read[Double] = Read(_.toDouble) 83 | implicit val bigDecimalRead: Read[BigDecimal] = Read(BigDecimal(_)) 84 | implicit def optionRead[A: Read]: Read[Option[A]] = Read(s => when(s.nonEmpty)(implicitly[Read[A]].apply(s))) 85 | 86 | // Java's time readers 87 | import java.sql.{Date => SqlDate, Time => SqlTime, Timestamp => SqlTimestamp} 88 | import java.time._ 89 | 90 | implicit val durationRead: Read[Duration] = Read(Duration.parse(_)) 91 | implicit val instantRead: Read[Instant] = Read(Instant.parse(_)) 92 | implicit val localDateTimeRead: Read[LocalDateTime] = Read(LocalDateTime.parse(_)) 93 | implicit val localDateRead: Read[LocalDate] = Read(LocalDate.parse(_)) 94 | implicit val monthDayRead: Read[MonthDay] = Read(MonthDay.parse(_)) 95 | implicit val offsetDateTimeRead: Read[OffsetDateTime] = Read(OffsetDateTime.parse(_)) 96 | implicit val offsetTimeRead: Read[OffsetTime] = Read(OffsetTime.parse(_)) 97 | implicit val periodRead: Read[Period] = Read(Period.parse(_)) 98 | implicit val yearRead: Read[Year] = Read(Year.parse(_)) 99 | implicit val yearMonthRead: Read[YearMonth] = Read(YearMonth.parse(_)) 100 | implicit val zonedDateTimeRead: Read[ZonedDateTime] = Read(ZonedDateTime.parse(_)) 101 | implicit val sqlDateRead: Read[SqlDate] = Read(SqlDate.valueOf) 102 | implicit val sqlTimeRead: Read[SqlTime] = Read(SqlTime.valueOf) 103 | implicit val sqlTimestampRead: Read[SqlTimestamp] = Read(SqlTimestamp.valueOf) 104 | 105 | /** Use this to create custom readers e.g. to read a LocalDate using some custom format 106 | * val readLocalDate: Read[LocalDate] = Read.temporalQuery(format = myFormat, query = LocalDate.from) 107 | */ 108 | def temporalQuery[A](format: DateTimeFormatter, query: temporal.TemporalQuery[A]): Read[A] = 109 | Read(format.parse(_, query)) 110 | } 111 | } 112 | } 113 | 114 | /** Implement this trait to make thing parsable 115 | * In most cases, use Scanner.Read typeclass when you simply need access to one String token 116 | * Use Scannable typeclass if you need access to the full scanner e.g. 
to detect encodings etc. 117 | */ 118 | trait Scannable[A] { 119 | def apply(scanner: Scanner): A 120 | } 121 | 122 | object Scannable { 123 | def apply[A](f: Scanner => A): Scannable[A] = 124 | new Scannable[A] { 125 | override def apply(scanner: Scanner) = f(scanner) 126 | } 127 | 128 | implicit def fromRead[A](implicit read: Scanner.Read[A]): Scannable[A] = 129 | Scannable(s => read(s.next())) 130 | 131 | implicit def tuple2[T1, T2](implicit t1: Scannable[T1], t2: Scannable[T2]): Scannable[(T1, T2)] = 132 | Scannable(s => t1(s) -> t2(s)) 133 | 134 | implicit def iterator[A](implicit scanner: Scannable[A]): Scannable[Iterator[A]] = 135 | Scannable(s => Iterator.continually(scanner(s)).withHasNext(s.hasNext)) 136 | } 137 | 138 | trait StringSplitter { 139 | def split(s: String): IterableOnce[String] 140 | } 141 | object StringSplitter { 142 | val Default = StringSplitter.anyOf(" \t\t\n\r") 143 | 144 | /** Split string on this character 145 | * This will return exactly 1 + n number of items where n is the number of occurrence of delimiter in String s 146 | */ 147 | def on(delimiter: Char): StringSplitter = 148 | new StringSplitter { 149 | override def split(s: String) = 150 | new Iterator[String] { 151 | private[this] var i = 0 152 | private[this] var j = -1 153 | private[this] val c = delimiter.toInt 154 | _next() 155 | 156 | private[this] def _next() = { 157 | i = j + 1 158 | val k = s.indexOf(c, i) 159 | j = if (k < 0) s.length else k 160 | } 161 | 162 | override def hasNext = i <= s.length 163 | 164 | override def next() = { 165 | val res = s.substring(i, j) 166 | _next() 167 | res 168 | } 169 | } 170 | } 171 | 172 | /** Split this string using ANY of the characters from delimiters */ 173 | def anyOf(delimiters: String, includeDelimiters: Boolean = false): StringSplitter = 174 | new StringSplitter { 175 | override def split(s: String) = new StringTokenizer(s, delimiters, includeDelimiters) 176 | } 177 | 178 | /** Split string using a regex pattern */ 179 | def regex(pattern: String): StringSplitter = 180 | new StringSplitter { 181 | override def split(s: String) = s.split(pattern, -1) 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /src/main/scala/better/files/TeeOutputStream.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.OutputStream 4 | 5 | /** Write to multiple outputstreams at once 6 | * If error happens on any one while doing an operation, only the last error is reported 7 | */ 8 | class TeeOutputStream(outs: OutputStream*) extends OutputStream { 9 | override def write(b: Int) = tryAll(outs)(_.write(b)) 10 | override def flush() = tryAll(outs)(_.flush()) 11 | override def write(b: Array[Byte]) = tryAll(outs)(_.write(b)) 12 | override def write(b: Array[Byte], off: Int, len: Int) = tryAll(outs)(_.write(b, off, len)) 13 | override def close() = tryAll(outs)(_.close()) 14 | } 15 | 16 | /** A sink outputstream similar to /dev/null - just consumes everything */ 17 | object NullOutputStream extends OutputStream { 18 | override def write(b: Int) = {} 19 | } 20 | -------------------------------------------------------------------------------- /src/main/scala/better/files/UnicodeCharset.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.nio.{BufferOverflowException, ByteBuffer, CharBuffer} 4 | import java.nio.charset._ 5 | 6 | import scala.jdk.CollectionConverters._ 7 
| 8 | /** A Unicode charset that handles byte-order markers 9 | * 10 | * @param underlyingCharset Use this charset if no known byte-order marker is detected; use this for encoding too 11 | * @param writeByteOrderMarkers If set, write BOMs while encoding 12 | */ 13 | class UnicodeCharset(underlyingCharset: Charset, writeByteOrderMarkers: Boolean) 14 | extends Charset(underlyingCharset.name(), underlyingCharset.aliases().asScala.toArray) { 15 | override def newDecoder() = new UnicodeDecoder(underlyingCharset) 16 | override def newEncoder() = 17 | if (writeByteOrderMarkers) new BomEncoder(underlyingCharset) else underlyingCharset.newEncoder() 18 | override def contains(cs: Charset) = underlyingCharset.contains(cs) 19 | } 20 | 21 | /** A Unicode decoder that uses the Unicode byte-order marker (BOM) to auto-detect the encoding 22 | * (if none detected, falls back on the defaultCharset). This also gets around a bug in the JDK 23 | * (http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4508058) where BOM is not consumed for UTF-8. 24 | * See: https://github.com/pathikrit/better-files/issues/107 25 | * 26 | * @param defaultCharset Use this charset if no known byte-order marker is detected 27 | */ 28 | class UnicodeDecoder(defaultCharset: Charset) extends CharsetDecoder(defaultCharset, 1, 1) { 29 | import UnicodeCharset.bomTable 30 | 31 | private[this] var inferredCharset: Option[Charset] = None 32 | 33 | @annotation.tailrec 34 | private[this] def decode( 35 | in: ByteBuffer, 36 | out: CharBuffer, 37 | candidates: Set[Charset] = Set.empty, 38 | firstCall: Boolean 39 | ): CoderResult = { 40 | if (isCharsetDetected) { 41 | detectedCharset().newDecoder().decode(in, out, false) 42 | } else if (firstCall && in.position() != 0) { 43 | // See: https://github.com/pathikrit/better-files/pull/384 44 | inferredCharset = Some(defaultCharset) 45 | decode(in, out, firstCall = false) 46 | } else if (candidates.isEmpty || !in.hasRemaining) { 47 | inferredCharset = Some(defaultCharset) 48 | in.rewind() 49 | decode(in, out, firstCall = false) 50 | } else if (candidates.forall(c => bomTable(c).length == in.position())) { 51 | inferredCharset = candidates.headOption.ensuring(candidates.size == 1, "Ambiguous BOMs found") 52 | decode(in, out, firstCall = false) 53 | } else { 54 | val idx = in.position() 55 | val byte = in.get() 56 | def isPossible(charset: Charset) = bomTable(charset).lift(idx).contains(byte) 57 | decode(in, out, candidates.filter(isPossible), firstCall = false) 58 | } 59 | } 60 | 61 | override def decodeLoop(in: ByteBuffer, out: CharBuffer) = 62 | decode(in = in, out = out, candidates = bomTable.keySet, firstCall = true) 63 | 64 | override def isCharsetDetected = inferredCharset.isDefined 65 | 66 | override def isAutoDetecting = true 67 | 68 | override def implReset() = inferredCharset = None 69 | 70 | override def detectedCharset() = 71 | inferredCharset.getOrElse(throw new IllegalStateException("Insufficient bytes read to determine charset")) 72 | } 73 | 74 | /** Encoder that writes the BOM for this charset */ 75 | class BomEncoder(charset: Charset) extends CharsetEncoder(charset, 1, 1) { 76 | private[this] val bom = UnicodeCharset.bomTable 77 | .getOrElse(charset, throw new IllegalArgumentException(s"$charset does not support BOMs")) 78 | .toArray 79 | private[this] var isBomWritten = false 80 | 81 | override def encodeLoop(in: CharBuffer, out: ByteBuffer): CoderResult = { 82 | if (!isBomWritten) { 83 | try { 84 | out.put(bom): Unit 85 | } catch { 86 | case _: BufferOverflowException => return 
CoderResult.OVERFLOW 87 | } finally { 88 | isBomWritten = true 89 | } 90 | } 91 | charset.newEncoder().encode(in, out, true) 92 | } 93 | 94 | override def implReset() = isBomWritten = false 95 | } 96 | 97 | object UnicodeCharset { 98 | private[files] val bomTable: Map[Charset, IndexedSeq[Byte]] = Map( 99 | "UTF-8" -> IndexedSeq(0xef, 0xbb, 0xbf), 100 | "UTF-16BE" -> IndexedSeq(0xfe, 0xff), 101 | "UTF-16LE" -> IndexedSeq(0xff, 0xfe), 102 | "UTF-32BE" -> IndexedSeq(0x00, 0x00, 0xfe, 0xff), 103 | "UTF-32LE" -> IndexedSeq(0xff, 0xfe, 0x00, 0x00) 104 | ).collect { 105 | case (charset, bytes) if Charset.isSupported(charset) => Charset.forName(charset) -> bytes.map(_.toByte) 106 | }.ensuring(_.nonEmpty, "No unicode charset detected") 107 | 108 | def isValid(charset: Charset): Boolean = bomTable.contains(charset) 109 | 110 | def apply(charset: Charset, writeByteOrderMarkers: Boolean = false): Charset = 111 | if (isValid(charset)) new UnicodeCharset(charset, writeByteOrderMarkers) 112 | else charset 113 | } 114 | -------------------------------------------------------------------------------- /src/main/scala/better/files/WriterOutputStream.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.{OutputStream, Writer} 4 | import java.nio.{ByteBuffer, CharBuffer} 5 | import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction} 6 | 7 | import scala.annotation.tailrec 8 | 9 | /** Code ported from Java to Scala: 10 | * https://github.com/apache/commons-io/blob/d357d9d563c4a34fa2ab3cdc68221c851a9de4f5/src/main/java/org/apache/commons/io/output/WriterOutputStream.java 11 | */ 12 | class WriterOutputStream(writer: Writer, decoder: CharsetDecoder, bufferSize: Int, flushImmediately: Boolean) extends OutputStream { 13 | 14 | /** CharBuffer used as output for the decoder */ 15 | private[this] val decoderOut = CharBuffer.allocate(bufferSize) 16 | 17 | /** ByteBuffer used as output for the decoder. This buffer can be small 18 | * as it is only used to transfer data from the decoder to the buffer provided by the caller. 
19 | */ 20 | private[this] val decoderIn = ByteBuffer.allocate(bufferSize >> 4) 21 | 22 | def this( 23 | writer: Writer, 24 | bufferSize: Int = DefaultBufferSize, 25 | flushImmediately: Boolean = false, 26 | charset: Charset = DefaultCharset 27 | ) = 28 | this( 29 | writer = writer, 30 | decoder = charset.newDecoder 31 | .onMalformedInput(CodingErrorAction.REPLACE) 32 | .onUnmappableCharacter(CodingErrorAction.REPLACE) 33 | .replaceWith("?"), 34 | bufferSize = bufferSize, 35 | flushImmediately = flushImmediately 36 | ) 37 | 38 | override def write(b: Array[Byte], off: Int, len: Int) = { 39 | @tailrec def loop(off: Int, len: Int): Unit = 40 | if (len > 0) { 41 | val c = decoderIn.remaining min len 42 | decoderIn.put(b, off, c) 43 | processInput(endOfInput = false) 44 | loop(off + c, len - c) 45 | } 46 | loop(off, len) 47 | if (flushImmediately) flushOutput() 48 | } 49 | 50 | override def write(b: Int) = write(Array(b.toByte)) 51 | 52 | override def flush() = { 53 | flushOutput() 54 | writer.flush() 55 | } 56 | 57 | override def close() = { 58 | processInput(endOfInput = true) 59 | flushOutput() 60 | writer.close() 61 | } 62 | 63 | private[this] def processInput(endOfInput: Boolean): Unit = { 64 | decoderIn.flip() 65 | @tailrec def loop(): Unit = { 66 | val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput) 67 | if (coderResult.isOverflow) { 68 | flushOutput() 69 | loop() 70 | } else { 71 | assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters") 72 | } 73 | } 74 | loop() 75 | decoderIn.compact() 76 | } 77 | 78 | private[this] def flushOutput(): Unit = { 79 | val p = decoderOut.position() 80 | if (p > 0) { 81 | writer.write(decoderOut.array, 0, p) 82 | decoderOut.rewind() 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/main/scala/better/files/package.scala: -------------------------------------------------------------------------------- 1 | package better 2 | 3 | import java.io.StreamTokenizer 4 | import java.nio.charset.Charset 5 | 6 | import scala.util.{Failure, Success, Try} 7 | import scala.util.control.NonFatal 8 | 9 | package object files extends Implicits { 10 | 11 | /** Default array buffer size 12 | * Seems like a good value used by JDK: (see: java.io.BufferedInputStream.DEFAULT_BUFFER_SIZE) 13 | */ 14 | val DefaultBufferSize = 8192 15 | 16 | /** The Default charset used by better-files 17 | * Note: It uses java.net.charset.Charset.DefaultCharset() in general but if the Default supports byte-order markers, 18 | * it uses a more compliant version than the JDK one (see: https://github.com/pathikrit/better-files/issues/107) 19 | */ 20 | val DefaultCharset: Charset = 21 | UnicodeCharset(Charset.defaultCharset()) 22 | 23 | val EOF = StreamTokenizer.TT_EOF 24 | 25 | /** Similar to the `with` keyword in Python and `using` keyword in .NET and `try-with-resource` syntax in Java, 26 | * this let's you use and dispose a resource e.g. 
27 | * 28 | * {{ 29 | * val lines: List[String] = using(file.newInputStream) { stream => 30 | * stream.lines.toList // Must be eager so .toList 31 | * } 32 | * }} 33 | */ 34 | def using[A: Disposable, B](resource: A)(f: A => B): B = 35 | new Dispose(resource).apply(f) 36 | 37 | @inline private[files] def when[A](condition: Boolean)(f: => A): Option[A] = if (condition) Some(f) else None 38 | 39 | @inline private[files] def repeat[U](n: Int)(f: => U): Unit = (1 to n).foreach(_ => f) 40 | 41 | private[files] def eofReader(read: => Int): Iterator[Int] = Iterator.continually(read).takeWhile(_ != EOF) 42 | 43 | /** This is the Scala equivalent of how javac compiles try-with-resources, 44 | * Except that fatal exceptions while disposing take precedence over exceptions thrown previously 45 | */ 46 | private[files] def tryWith[A](f: => A, close: () => Unit, finallyClose: Boolean): A = 47 | try { 48 | f 49 | } catch { 50 | case evalError: Throwable => 51 | try { 52 | close() 53 | } catch { 54 | case NonFatal(closingError) => 55 | evalError.addSuppressed(closingError) 56 | case fatalClosingError: Throwable => 57 | fatalClosingError.addSuppressed(evalError) 58 | throw fatalClosingError 59 | } 60 | throw evalError 61 | } finally { 62 | if (finallyClose) close() 63 | } 64 | 65 | /** Utility to apply f on all xs skipping over errors 66 | * Throws the last error that happened 67 | */ 68 | private[files] def tryAll[A](xs: Seq[A])(f: A => Unit): Unit = { 69 | val res = xs.foldLeft(Option.empty[Throwable]) { case (currError, a) => 70 | Try(f(a)) match { 71 | case Success(_) => currError 72 | case Failure(e) => Some(e) 73 | } 74 | } 75 | res.foreach(throwable => throw throwable) 76 | } 77 | 78 | private[files] def toHex(bytes: Array[Byte]): String = 79 | String.format("%0" + (bytes.length << 1) + "X", new java.math.BigInteger(1, bytes)) 80 | } 81 | -------------------------------------------------------------------------------- /src/site-preprocess/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | better-files 7 | 8 | 9 | 10 | 32 | 33 | 34 | 35 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /src/site/tech_talk_preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pathikrit/better-files/faffdb87ec0656f56c9f8e2f0dd9e846285bf4f2/src/site/tech_talk_preview.png -------------------------------------------------------------------------------- /src/test/java/better/files/benchmarks/ArrayBufferScanner.java: -------------------------------------------------------------------------------- 1 | package better.files.benchmarks; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.IOException; 5 | import java.io.UncheckedIOException; 6 | import java.util.Arrays; 7 | 8 | /** 9 | * Hand built using a char buffer 10 | */ 11 | public class ArrayBufferScanner extends AbstractScanner { 12 | private char[] buffer = new char[1 << 4]; 13 | private int pos = 1; 14 | 15 | private BufferedReader reader; 16 | 17 | public ArrayBufferScanner(BufferedReader reader) { 18 | super(reader); 19 | this.reader = reader; 20 | } 21 | 22 | @Override 23 | public boolean hasNext() { 24 | return pos > 0; 25 | } 26 | 27 | private void loadBuffer() { 28 | pos = 0; 29 | try { 30 | for (int i; (i = reader.read()) != -1; ) { 31 | char c = (char) i; 32 | if (c != ' ' && c != '\n' && c != '\t' && c != '\r' && c != '\f') { 33 | if (pos == buffer.length) buffer = 
Arrays.copyOf(buffer, 2 * pos); 34 | buffer[pos++] = c; 35 | } else if (pos != 0) break; 36 | } 37 | } catch (IOException e) { 38 | throw new UncheckedIOException(e); 39 | } 40 | } 41 | 42 | public String current() { 43 | return String.copyValueOf(buffer, 0, pos); 44 | } 45 | 46 | @Override 47 | public String next() { 48 | loadBuffer(); 49 | return current(); 50 | } 51 | 52 | @Override 53 | public String nextLine() { 54 | try { 55 | return reader.readLine(); 56 | } catch (IOException e) { 57 | throw new UncheckedIOException(e); 58 | } 59 | } 60 | 61 | @Override 62 | public int nextInt() { 63 | loadBuffer(); 64 | final int radix = 10; 65 | int result = 0; 66 | int i = buffer[0] == '-' || buffer[0] == '+' ? 1 : 0; 67 | for (checkValidNumber(pos > i); i < pos; i++) { 68 | int digit = buffer[i] - '0'; 69 | checkValidNumber(0 <= digit && digit <= 9); 70 | result = result * radix + digit; 71 | } 72 | return buffer[0] == '-' ? -result : result; 73 | } 74 | 75 | private void checkValidNumber(boolean condition) { 76 | if (!condition) throw new NumberFormatException(current()); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/test/resources/better/files/issue-624.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pathikrit/better-files/faffdb87ec0656f56c9f8e2f0dd9e846285bf4f2/src/test/resources/better/files/issue-624.zip -------------------------------------------------------------------------------- /src/test/resources/better/files/issues-262.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pathikrit/better-files/faffdb87ec0656f56c9f8e2f0dd9e846285bf4f2/src/test/resources/better/files/issues-262.zip -------------------------------------------------------------------------------- /src/test/resources/better/files/issues-326.txt: -------------------------------------------------------------------------------- 1 | 第一回·宴桃园豪杰三结义 斩黄巾英雄首立功 2 | 3 | 滚滚长江东逝水,浪花淘尽英雄。是非成败转头空。青山依旧在,几度夕阳红。 白发渔樵江渚上,惯看秋月春风。一壶浊酒喜相逢。古今多少事,都付笑谈中。 4 | 5 | ——调寄《临江仙》 6 | 7 | 话说天下大势,分久必合,合久必分。周末七国分争,并入于秦。及秦灭之后,楚、汉分争,又并入于汉。汉朝自高祖斩白蛇而起义,一统天下,后来光武中兴,传至献帝,遂分为三国。推其致乱之由,殆始于桓、灵二帝。桓帝禁锢善类,崇信宦官。及桓帝崩,灵帝即位,大将军窦武、太傅陈蕃,共相辅佐。时有宦官曹节等弄权,窦武、陈蕃谋诛之,机事不密,反为所害,中涓自此愈横。 8 | 9 | 建宁二年四月望日,帝御温德殿。方升座,殿角狂风骤起。只见一条大青蛇,从梁上飞将下来,蟠于椅上。帝惊倒,左右急救入宫,百官俱奔避。须臾,蛇不见了。忽然大雷大雨,加以冰雹,落到半夜方止,坏却房屋无数。建宁四年二月,洛阳地震;又海水泛溢,沿海居民,尽被大浪卷入海中。光和元年,雌鸡化雄。六月朔,黑气十余丈,飞入温雄殿中。秋七月,有虹现于玉堂;五原山岸,尽皆崩裂。种种不祥,非止一端。帝下诏问群臣以灾异之由,议郎蔡邕上疏,以为蜺堕鸡化,乃妇寺干政之所致,言颇切直。帝览奏叹息,因起更衣。曹节在后窃视,悉宣告左右;遂以他事陷邕于罪,放归田里。后张让、赵忠、封谞、段珪、曹节、侯览、蹇硕、程旷、夏惲、郭胜十人朋比为奸,号为“十常侍”。帝尊信张让,呼为“阿父”。朝政日非,以致天下人心思乱,盗贼蜂起。 10 | 11 | 时巨鹿郡有兄弟三人,一名张角,一名张宝,一名张梁。那张角本是个不第秀才,因入山采药,遇一老人,碧眼童颜,手执藜杖,唤角至一洞中,以天书三卷授之,曰:“此名《太平要术》,汝得之,当代天宣化,普救世人;若萌异心,必获恶报。”角拜问姓名。老人曰:“吾乃南华老仙也。”言讫,化阵清风而去。角得此书,晓夜攻习,能呼风唤雨,号为“太平道人”。中平元年正月内,疫气流行,张角散施符水,为人治病,自称“大贤良师”。角有徒弟五百余人,云游四方,皆能书符念咒。次后徒众日多,角乃立三十六方,大方万余人,小方六七千,各立渠帅,称为将军;讹言:“苍天已死,黄天当立;岁在甲子,天下大吉。”令人各以白土,书“甲子”二字于家中大门上。青、幽、徐、冀、荆、扬、兖、豫八州之人,家家侍奉大贤良师张角名字。角遣其党马元义,暗赍金帛,结交中涓封谞,以为内应。角与二弟商议曰:“至难得者,民心也。今民心已顺,若不乘势取天下,诚为可惜。”遂一面私造黄旗,约期举事;一面使弟子唐周,驰书报封谞。唐周乃径赴省中告变。帝召大将军何进调兵擒马元义,斩之;次收封谞等一干人下狱。张角闻知事露,星夜举兵,自称“天公将军”,张宝称“地公将军”,张梁称“人公将军”。申言于众曰:“今汉运将终,大圣人出。汝等皆宜顺天从正,以乐太平。”四方百姓,裹黄巾从张角反者四五十万。贼势浩大,官军望风而靡。何进奏帝火速降诏,令各处备御,讨贼立功。一面遣中郎将卢植、皇甫嵩、朱俊,各引精兵、分三路讨之。 12 | 13 | 且说张角一军,前犯幽州界分。幽州太守刘焉,乃江夏竟陵人氏,汉鲁恭王之后也。当时闻得贼兵将至,召校尉邹靖计议。靖曰:“贼兵众,我兵寡,明公宜作速招军应敌。”刘焉然其说,随即出榜招募义兵。 14 | 15 | 
榜文行到涿县,引出涿县中一个英雄。那人不甚好读书;性宽和,寡言语,喜怒不形于色;素有大志,专好结交天下豪杰;生得身长七尺五寸,两耳垂肩,双手过膝,目能自顾其耳,面如冠玉,唇若涂脂;中山靖王刘胜之后,汉景帝阁下玄孙,姓刘,名备,字玄德。昔刘胜之子刘贞,汉武时封涿鹿亭侯,后坐酎金失侯,因此遗这一枝在涿县。玄德祖刘雄,父刘弘。弘曾举孝廉,亦尝作吏,早丧。玄德幼孤,事母至孝;家贫,贩屦织席为业。家住本县楼桑村。其家之东南,有一大桑树,高五丈余,遥望之,童童如车盖。相者云:“此家必出贵人。”玄德幼时,与乡中小儿戏于树下,曰:“我为天子,当乘此车盖。”叔父刘元起奇其言,曰:“此儿非常人也!”因见玄德家贫,常资给之。年十五岁,母使游学,尝师事郑玄、卢植,与公孙瓚等为友。 16 | 17 | 及刘焉发榜招军时,玄德年已二十八岁矣。当日见了榜文,慨然长叹。随后一人厉声言曰:“大丈夫不与国家出力,何故长叹?”玄德回视其人,身长八尺,豹头环眼,燕颔虎须,声若巨雷,势如奔马。玄德见他形貌异常,问其姓名。其人曰:“某姓张,名飞,字翼德。世居涿郡,颇有庄田,卖酒屠猪,专好结交天下豪杰。恰才见公看榜而叹,故此相问。”玄德曰:“我本汉室宗亲,姓刘,名备。今闻黄巾倡乱,有志欲破贼安民,恨力不能,故长叹耳。”飞曰:“吾颇有资财,当招募乡勇,与公同举大事,如何。”玄德甚喜,遂与同入村店中饮酒。 18 | 19 | 正饮间,见一大汉,推着一辆车子,到店门首歇了,入店坐下,便唤酒保:“快斟酒来吃,我待赶入城去投军。”玄德看其人:身长九尺,髯长二尺;面如重枣,唇若涂脂;丹凤眼,卧蚕眉,相貌堂堂,威风凛凛。玄德就邀他同坐,叩其姓名。其人曰:“吾姓关,名羽,字长生,后改云长,河东解良人也。因本处势豪倚势凌人,被吾杀了,逃难江湖,五六年矣。今闻此处招军破贼,特来应募。”玄德遂以己志告之,云长大喜。同到张飞庄上,共议大事。飞曰:“吾庄后有一桃园,花开正盛;明日当于园中祭告天地,我三人结为兄弟,协力同心,然后可图大事。”玄德、云长齐声应曰:“如此甚好。” 20 | 21 | 次日,于桃园中,备下乌牛白马祭礼等项,三人焚香再拜而说誓曰:“念刘备、关羽、张飞,虽然异姓,既结为兄弟,则同心协力,救困扶危;上报国家,下安黎庶。不求同年同月同日生,只愿同年同月同日死。皇天后土,实鉴此心,背义忘恩,天人共戮!”誓毕,拜玄德为兄,关羽次之,张飞为弟。祭罢天地,复宰牛设酒,聚乡中勇士,得三百余人,就桃园中痛饮一醉。来日收拾军器,但恨无马匹可乘。正思虑间,人报有两个客人,引一伙伴当,赶一群马,投庄上来。玄德曰:“此天佑我也!”三人出庄迎接。原来二客乃中山大商:一名张世平,一名苏双,每年往北贩马,近因寇发而回。玄德请二人到庄,置酒管待,诉说欲讨贼安民之意。二客大喜,愿将良马五十匹相送;又赠金银五百两,镔铁一千斤,以资器用。 22 | 23 | 玄德谢别二客,便命良匠打造双股剑。云长造青龙偃月刀,又名“冷艳锯”,重八十二斤。张飞造丈八点钢矛。各置全身铠甲。共聚乡勇五百余人,来见邹靖。邹靖引见太守刘焉。三人参见毕,各通姓名。玄德说起宗派,刘焉大喜,遂认玄德为侄。不数日,人报黄巾贼将程远志统兵五万来犯涿郡。刘焉令邹靖引玄德等三人,统兵五百,前去破敌。玄德等欣然领军前进,直至大兴山下,与贼相见。贼众皆披发,以黄巾抹额。当下两军相对,玄德出马,左有云长,右有翼德,扬鞭大骂:“反国逆贼,何不早降!”程远志大怒,遣副将邓茂出战。张飞挺丈八蛇矛直出,手起处,刺中邓茂心窝,翻身落马。程远志见折了邓茂,拍马舞刀,直取张飞。云长舞动大刀,纵马飞迎。程远志见了,早吃一惊,措手不及,被云长刀起处,挥为两段。后人有诗赞二人曰:英雄露颖在今朝,一试矛兮一试刀。初出便将威力展,三分好把姓名标。 24 | 25 | 众贼见程远志被斩,皆倒戈而走。玄德挥军追赶,投降者不计其数,大胜而回。刘焉亲自迎接,赏劳军士。次日,接得青州太守龚景牒文,言黄巾贼围城将陷,乞赐救援。刘焉与玄德商议。玄德曰:“备愿往救之。”刘焉令邹靖将兵五千,同玄德、关、张,投青州来。贼众见救军至,分兵混战。玄德兵寡不胜,退三十里下寨。 26 | 27 | 玄德谓关、张曰:“贼众我寡;必出奇兵,方可取胜。”乃分关公引一千军伏山左,张飞引一千军伏山右,鸣金为号,齐出接应。次日,玄德与邹靖引军鼓噪而进。贼众迎战,玄德引军便退。贼众乘势追赶,方过山岭,玄德军中一齐鸣金,左右两军齐出,玄德麾军回身复杀。三路夹攻,贼众大溃。直赶至青州城下,太守龚景亦率民兵出城助战。贼势大败,剿戮极多,遂解青州之围。后人有诗赞玄德曰:运筹决算有神功,二虎还须逊一龙。初出便能垂伟绩,自应分鼎在孤穷。 28 | 29 | 龚景犒军毕,邹靖欲回。玄德曰:“近闻中郎将卢植与贼首张角战于广宗,备昔曾师事卢植,欲往助之。”于是邹靖引军自回,玄德与关、张引本部五百人投广宗来。至卢植军中,入帐施礼,具道来意。卢植大喜,留在帐前听调。 30 | 31 | 时张角贼众十五万,植兵五万,相拒于广宗,未见胜负。植谓玄德曰:“我今围贼在此,贼弟张梁、张宝在颍川,与皇甫嵩、朱俊对垒。汝可引本部人马,我更助汝一千官军,前去颍川打探消息,约期剿捕。”玄德领命,引军星夜投颍川来。 32 | 33 | 时皇甫嵩、朱俊领军拒贼,贼战不利,退入长社,依草结营。嵩与俊计曰:“贼依草结营,当用火攻之。”遂令军士,每人束草一把,暗地埋伏。其夜大风忽起。二更以后,一齐纵火,嵩与俊各引兵攻击贼寨,火焰张天,贼众惊慌,马不及鞍,人不及甲,四散奔走。 34 | 35 | 杀到天明,张梁、张宝引败残军士,夺路而走。忽见一彪军马,尽打红旗,当头来到,截住去路。为首闪出一将,身长七尺,细眼长髯,官拜骑都尉,沛国谯郡人也,姓曹,名操,字孟德。操父曹嵩,本姓夏侯氏,因为中常侍曹腾之养子,故冒姓曹。曹嵩生操,小字阿瞒,一名吉利。操幼时,好游猎,喜歌舞,有权谋,多机变。操有叔父,见操游荡无度,尝怒之,言于曹嵩。嵩责操。操忽心生一计,见叔父来,诈倒于地,作中风之状。叔父惊告嵩,嵩急视之。操故无恙。嵩曰:“叔言汝中风,今已愈乎?”操曰:“儿自来无此病;因失爱于叔父,故见罔耳。”嵩信其言。后叔父但言操过,嵩并不听。因此,操得恣意放荡。时人有桥玄者,谓操曰:“天下将乱,非命世之才不能济。能安之者,其在君乎?”南阳何颙见操,言:“汉室将亡,安天下者,必此人也。”汝南许劭,有知人之名。操往见之,问曰:“我何如人?”劭不答。又问,劭曰:“子治世之能臣,乱世之奸雄也。”操闻言大喜。年二十,举孝廉,为郎,除洛阳北部尉。初到任,即设五色棒十余条于县之四门,有犯禁者,不避豪贵,皆责之。中常侍蹇硕之叔,提刀夜行,操巡夜拿住,就棒责之。由是,内外莫敢犯者,威名颇震。后为顿丘令,因黄巾起,拜为骑都尉,引马步军五千,前来颍川助战。正值张梁、张宝败走,曹操拦住,大杀一阵,斩首万余级,夺得旗幡、金鼓、马匹极多。张梁、张宝死战得脱。操见过皇甫嵩、朱俊,随即引兵追袭张梁、张宝去了。 36 | 37 | 却说玄德引关、张来颍川,听得喊杀之声,又望见火光烛天,急引兵来时,贼已败散。玄德见皇甫嵩、朱俊,具道卢植之意。嵩曰:“张梁、张宝势穷力乏,必投广宗去依张角。玄德可即星夜往助。”玄德领命,遂引兵复回。得到半路,只见一簇军马,护送一辆槛车,车中之囚,乃卢植也。玄德大惊,滚鞍下马,问其缘故。植曰:“我围张角,将次可破;因角用妖术,未能即胜。朝廷差黄门左丰前来体探,问我索取贿赂。我答曰:‘军粮尚缺,安有余钱奉承天使?’左丰挟恨,回奏朝廷,说我高垒不战,惰慢军心;因此朝廷震怒,遣中郎将董卓来代将我兵,取我回京问罪。”张飞听罢,大怒,要斩护送军人,以救卢植。玄德急止之曰:“朝廷自有公论,汝岂可造次?”军士簇拥卢植去了。关公曰:“卢中郎已被逮,别人领兵,我等去无所依,不如且回涿郡。”玄德从其言,遂引军北行。行无二日,忽闻山后喊声大震。玄德引关、张纵马上高冈望之,见汉军大败,后面漫山塞野,黄巾盖地而来,旗上大书“天公将军”。玄德曰:“此张角也!可速战!”三人飞马引军而出。张角正杀败董卓,乘势赴来,忽遇三人冲杀,角军大乱,败走五十余里。 38 | 39 | 三人救了董卓回寨。卓问三人现居何职。玄德曰:“白身。”卓甚轻之,不为礼。玄德出,张飞大怒曰:“我等亲赴血战,救了这厮,他却如此无礼。若不杀之,难消我气!”便要提刀入帐来杀董卓。正是:人情势利古犹今,谁识英雄是白身?安得快人如翼德,尽诛世上负心人! 
40 | 41 | 毕竟董卓性命如何,且听下文分解。 42 | 43 | 第二回·张翼德怒鞭督邮 何国舅谋诛宦竖 44 | 45 | 且说董卓字仲颖,陇西临洮人也,官拜河东太守,自来骄傲。当日怠慢了玄德,张飞性发,便欲杀之。玄德与关公急止之曰;“他是朝廷命官,岂可擅杀?”飞曰:“若不杀这厮,反要在他部下听令,其实不甘!二兄要便住在此,我自投别处去也!”玄德曰:“我三人义同生死,岂可相离?不若都投别处去便了。”飞曰:“若如此,稍解吾恨。” 46 | 47 | 连夜引军来投朱俊。俊待之甚厚,合兵一处,进讨张宝。是时曹操自跟皇甫嵩讨张梁,大战于曲阳。这里朱俊进攻张宝。张宝引贼众八九万,屯于山后。俊令玄德为其先锋,与贼对敌。张宝遣副将高升出马搦战,玄德使张飞击之。飞纵马挺矛,与升交战,不数合,刺升落马。玄德麾军直冲过去。张宝就马上披发仗剑,作起妖法。只见风雷大作,一股黑气,从天而降,黑气中似有无限人马杀来。玄德连忙回军,军中大乱。败阵而归,与朱俊计议。俊曰:“彼用妖术,我来日可宰猪羊狗血,令军士伏于山头;候贼赶来,从高坡上泼之,其法可解。”玄德听令,拨关公、张飞各引军一千,伏于山后高冈之上,盛猪羊狗血并秽物准备。次日,张宝摇旗擂鼓,引军搦战,玄德出迎。交锋之际,张宝作法,风雷大作,飞砂走石,黑气漫天,滚滚人马,自天而下。玄德拨马便走,张宝驱兵赶来。将过山头,关、张伏军放起号炮,秽物齐泼。但见空中纸人草马,纷纷坠地;风雷顿息,砂石不飞。兵围住阳城攻打,一面差人打探皇甫嵩消息。探子回报,具说:“皇甫嵩大获胜捷,朝廷以董卓屡败,命嵩代之。嵩到时,张角已死;张梁统其众,与我军相拒,被皇甫嵩连胜七阵,斩张梁于曲阳。发张角之棺,戮尸枭首,送往京师。余众俱降。朝廷加皇甫嵩为车骑将军,领冀州牧。皇甫嵩又表奏卢植有功无罪,朝廷复卢植原官。曹操亦以有功,除济南相,即日将班师赴任。”朱俊听说,催促军马,悉力攻打阳城。贼势危急,贼将严政刺杀张宝,献首投降。朱俊遂平数郡,上表献捷。时又黄巾余党三人:赵弘、韩忠、孙仲,聚众数万,望风烧劫,称与张角报仇。朝廷命朱俊即以得胜之师讨之。俊奉诏,率军前进。时贼据宛城,俊引兵攻之,赵弘遣韩忠出战。俊遣玄德、关、张攻城西南角。韩忠尽率精锐之众,来西南角抵敌。朱俊自纵铁骑二千,径取东北角。贼恐失城,急弃西南而回。玄德从背后掩杀,贼众大败,奔入宛城。朱俊分兵四面围定。城中断粮,韩忠使人出城投降。俊不许。玄德曰:“昔高祖之得天下,盖为能招降纳顺;公何拒韩忠耶?”俊曰:“彼一时,此一时也。昔秦、项之际,天下大乱,民无定主,故招降赏附,以劝来耳。今海内一统,惟黄巾造反;若容其降,无以劝善。使贼得利恣意劫掠,失利便投降:此长寇之志,非良策也。”玄德曰:“不容寇降是矣。今四面围如铁桶,贼乞降不得,必然死战。万人一心,尚不可当,况城中有数万死命之人乎?不若撤去东南,独攻西北。贼必弃城而走,无心恋战,可即擒也。”俊然之,随撤东南二面军马,一齐攻打西北。韩忠果引军弃城而奔。俊与玄德、关、张率三军掩杀,射死韩忠,余皆四散奔走。正追赶间,赵弘、孙仲引贼众到,与俊交战。俊见弘势大,引军暂退。弘乘势复夺宛城。俊离十里下寨。方欲攻打,忽见正东一彪人马到来。为首一将,生得广额阔面,虎体熊腰;吴郡富春人也,姓孙,名坚,字文台,乃孙武子之后。年十七岁时,与父至钱塘,见海贼十余人,劫取商人财物,于岸上分赃。坚谓父曰:“此贼可擒也。”遂奋力提刀上岸,扬声大叫,东西指挥,如唤人状。贼以为官兵至,尽弃财物奔走。坚赶上,杀一贼。由是郡县知名,荐为校尉。后会稽妖贼许昌造反,自称“阳明皇帝”,聚众数万;坚与郡司马招募勇士千余人,会合州郡破之,斩许昌并其子许韶。刺史臧旻上表奏其功,除坚为盐渎丞,又除盱眙丞、下邳丞。今见黄巾寇起,聚集乡中少年及诸商旅,并淮泗精兵一千五百余人,前来接应。 于是三人 48 | -------------------------------------------------------------------------------- /src/test/resources/better/files/test-file.txt: -------------------------------------------------------------------------------- 1 | This is the test-file.txt file. 2 | 3 | It is used to verify that loading of class loader resources works correctly. 4 | -------------------------------------------------------------------------------- /src/test/resources/better/files/test_pkg/another-test-file.txt: -------------------------------------------------------------------------------- 1 | This is the another-test-file.txt file. 2 | 3 | It is used to verify that loading of class loader resources works correctly. 
4 | -------------------------------------------------------------------------------- /src/test/scala-2/better/files/ShapelessScannerSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.util.Try 4 | 5 | import better.files.Scanner.Read 6 | 7 | import shapeless._ 8 | 9 | object ShapelessScanner { 10 | implicit val hNilScannable: Scannable[HNil] = 11 | Scannable(_ => HNil) 12 | 13 | implicit def hListScannable[H, T <: HList](implicit h: Lazy[Scannable[H]], t: Scannable[T]): Scannable[H :: T] = 14 | Scannable(s => h.value(s) :: t(s)) 15 | 16 | implicit def genericScannable[A, R](implicit 17 | gen: Generic.Aux[A, R], 18 | reprScannable: Lazy[Scannable[R]] 19 | ): Scannable[A] = 20 | Scannable(s => gen.from(reprScannable.value(s))) 21 | 22 | implicit val cnilReader: Read[CNil] = 23 | Read(s => throw new RuntimeException(s"Could not read $s into this coproduct")) 24 | 25 | implicit def coproductReader[H, T <: Coproduct](implicit h: Read[H], t: Read[T]): Read[H :+: T] = 26 | Read(s => Try(Inl(h(s))).getOrElse(Inr(t(s)))) 27 | } 28 | 29 | class ShapelessScannerSpec extends CommonSpec { 30 | import ShapelessScanner._ 31 | 32 | val text = """ 33 | 12 Bob True 34 | 13 Mary False 35 | 26 Rick True 36 | """ 37 | 38 | "Shapeless Scanner" should "parse HList" in { 39 | val in = Scanner(text) 40 | 41 | type Row = Int :: String :: Boolean :: HNil 42 | val out = Seq.fill(3)(in.next[Row]) 43 | assert( 44 | out == Seq( 45 | 12 :: "Bob" :: true :: HNil, 46 | 13 :: "Mary" :: false :: HNil, 47 | 26 :: "Rick" :: true :: HNil 48 | ) 49 | ) 50 | } 51 | 52 | "Shapeless Scanner" should "parse case class" in { 53 | val in = Scanner(text) 54 | 55 | case class Person(id: Int, name: String, isMale: Boolean) 56 | assert(in.next[Iterator[Person]].map(_.id).sum == 51) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/test/scala/better/files/CloseableIteratorSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.compat._ 4 | import scala.util.Try 5 | 6 | class CloseableIteratorSpec extends CommonSpec { 7 | class TestIterator(n: Int) { 8 | var isClosed = false 9 | 10 | def vanilla() = (1 to n).iterator 11 | 12 | val iterator = CloseableIterator( 13 | vanilla(), 14 | () => { 15 | assert(!isClosed, "Already closed!") 16 | isClosed = true 17 | } 18 | ) 19 | } 20 | 21 | "closeable iterators" should "close" in { 22 | def check[A](name: String, f: Iterator[Int] => A) = withClue(name) { 23 | val n = 10 24 | val test = new TestIterator(n) 25 | f(test.iterator) match { 26 | case result: Iterator[_] => // Test when we make new iterators e.g. .map() 27 | assert(!test.isClosed, "We just made an iterator, closed must not be called yet") 28 | (1 to 4).foreach(_ => test.iterator.hasNext) // Call hasNext bunch of times to make sure we call close() atmost once 29 | assert(!test.isClosed) 30 | result.asInstanceOf[Iterator[A]].size // Trigger onComplete 31 | 32 | case (l: Iterator[_], r: Iterator[_]) => // Test .partition(), .span(), .duplicate() etc. 
33 | assert(!test.isClosed, "Creating 2 iterators must not trigger close") 34 | assert(Try(l.isEmpty).isSuccess) 35 | assert(!test.isClosed, "Atleast l or r must be completed to trigger close") 36 | assert(Try(r.isEmpty).isSuccess) 37 | assert(!test.isClosed, "Atleast l or r must be completed to trigger close") 38 | l.size + r.size // Triggers completion 39 | 40 | case result => 41 | (1 to 4).foreach(_ => test.iterator.hasNext) // Call hasNext bunch of times to make sure we call close() atmost once 42 | assert(result == f(test.vanilla()), "Different result found over vanilla iterator") 43 | } 44 | assert(test.isClosed, "Iterator must be closed by now") 45 | assertThrows[IllegalStateException](test.iterator.next()) // , "Cannot call next() on a closed iterator") 46 | } 47 | 48 | check("zipWithIndex", _.zipWithIndex) 49 | check("takeLess", _.take(3)) 50 | check("takeMore", _.take(15)) 51 | check("dropLess", _.drop(3)) 52 | check("dropMore", _.drop(15)) 53 | check("sliceLess", _.slice(3, 7)) 54 | check("sliceMore", _.slice(3, 70)) 55 | check("doubleSlice", _.slice(2, 9).slice(1, 3)) 56 | check("sliceBeyond", _.slice(23, 700)) 57 | check("map", _.map(_.toString)) 58 | check("flatMapIterator", _.flatMap(i => Iterator(i, 2 * i))) 59 | check("flatMapEmpty", _.flatMap(_ => Iterator.empty)) 60 | check("flatMapList", _.flatMap(i => List(i, 2 * i))) 61 | check("filterNone", _.filter(_ < 0)) 62 | check("filterSome", _.filter(_ % 2 == 0)) 63 | check("collectNone", _.collect({ case i if i < 0 => i })) 64 | check("collectSome", _.collect({ case i if i % 2 == 0 => i })) 65 | check("scanLeft", _.scanLeft(0)(_ + _)) 66 | // check("scanRight", _.scanRight(0)(_ + _)) // scanRight does close because it needs to go to end 67 | check("takeNone", _.takeWhile(_ < 0)) 68 | check("takeSome", _.takeWhile(_ < 5)) 69 | check("dropNone", _.dropWhile(_ < 0)) 70 | check("dropSome", _.dropWhile(_ < 5)) 71 | check("partition", _.partition(_ % 2 == 0)) 72 | check("partitionSwap", _.partition(_ % 2 == 0).swap) 73 | check("filterPartition", _.filter(_ > 5).partition(_ % 2 == 0)) 74 | check("span", _.span(_ > 5)) 75 | check("spanEmpty", _.span(_ < 0)) 76 | check("padTo", _.padTo(100, 0)) 77 | check("padToLess", _.padTo(0, 0)) 78 | check("foreach", _.foreach(_ => ())) 79 | check("findNone", _.find(_ < 0)) 80 | check("findSome", _.find(_ == 3)) 81 | check("existsFalse", _.exists(_ < 0)) 82 | check("existsTrue", _.exists(_ == 5)) 83 | check("containsFalse", _.contains(5)) 84 | check("containsTrue", _.contains(-5)) 85 | check("indexOfFalse", _.indexOf(-5)) 86 | check("indexOfTrue", _.indexOf(5)) 87 | check("indexWhereFalse", _.indexWhere(_ < 0)) 88 | check("indexWhereTrue", _.indexWhere(_ > 5)) 89 | check("forAllFalse", _.forall(_ < 0)) 90 | check("forAllTrue", _.forall(_ < 100)) 91 | check("seq", _.iterator) 92 | check("buffered", _.buffered) 93 | check("zipLarge", _.zip(Iterator.continually(0))) 94 | check("zipSmall", _.zip(Iterator(1, 0))) 95 | check("zipWithIndex", _.zipWithIndex) 96 | check("zipEmpty", _.zip(Iterator.empty)) 97 | check("groupedLarge", _.grouped(100)) 98 | check("groupedSmall", _.grouped(2)) 99 | check("slidingSmall", _.sliding(2, 3)) 100 | check("slidingLarge", _.sliding(20, 3)) 101 | check("slidingLarge2", _.sliding(1, 23)) 102 | check("toList", _.toList) 103 | check("size", _.size) 104 | check("duplicate", _.duplicate) 105 | check("patch", _.patch(from = 3, patchElems = (1 to 5).iterator, replaced = 3)) 106 | check("appendAndTakeSome", it => (it ++ Iterator(11, 12, 13)).take(3)) 107 | 
check("appendAndTakeMore", it => (it ++ Iterator(11, 12, 13)).take(11)) 108 | } 109 | 110 | "closeable iterators" can "be chained" in { 111 | def check[A](name: String, f: (Iterator[Int], Iterator[Int]) => Iterator[A]) = withClue(name) { 112 | val n = 10 113 | val t1 = new TestIterator(n) 114 | val t2 = new TestIterator(2 * n) 115 | val resultIterator = f(t1.iterator, t2.iterator) 116 | assert(!t1.isClosed && !t2.isClosed, "Cannot be closed before evaluation") 117 | val result = resultIterator.toList // Trigger completion 118 | assert(t1.isClosed, "First close() was not invoked") 119 | assert(t2.isClosed, "Second close() was not invoked") 120 | assert(result === f(t1.vanilla(), t2.vanilla()).toList, "Different result found over vanilla iterator") 121 | } 122 | 123 | check("append", _ ++ _) 124 | check("zip", _.zip(_)) 125 | check("zipWithTake", (t1, t2) => t1.take(5).zip(t2.take(3))) 126 | check("zipAll", _.zipAll(_, -100, 100)) 127 | check("forComprehension", (t1, t2) => for { i <- t1; j <- t2 } yield i + j) 128 | } 129 | 130 | "non closing versions" should "not close on partial evals" in { 131 | val t1 = new TestIterator(10) 132 | assert(!t1.isClosed, "Cannot be closed before evaluation") 133 | assert(t1.iterator.take(3).size == 3) 134 | assert(t1.isClosed, "Must be closed") 135 | 136 | val t2 = new TestIterator(10) 137 | assert(!t2.isClosed, "Cannot be closed before evaluation") 138 | assert(t2.iterator.nonClosing().take(3).size == 3) 139 | assert(!t2.isClosed, "Non closing iterator should not close on partial evaluation") 140 | assert(t2.iterator.length > 0) 141 | assert(t2.isClosed, "Must close in the end") 142 | 143 | val t3 = new TestIterator(10) 144 | assert(!t3.isClosed, "Cannot be closed before evaluation") 145 | assert(t3.iterator.nonClosing(closeInTheEnd = false).take(3).size == 3) 146 | assert(!t3.isClosed, "Non closing iterator should not close on partial evaluation") 147 | assert(t3.iterator.length > 0) 148 | assert(t3.isClosed, "Must NOT close in the end") 149 | } 150 | 151 | "streams" can "be partitioned" in { 152 | File.usingTemporaryDirectory() { dir => 153 | (dir / "1.csv").touch() 154 | (dir / "2.csv").touch() 155 | (dir / "3.txt").touch() 156 | val (csv, other) = dir.listRecursively().partition(_.extension().contains(".csv")) 157 | assert(csv.size == 2) 158 | assert(other.size == 1) 159 | 160 | val (ones, twos) = dir.glob("*.csv").partition(_.nameWithoutExtension == "1") 161 | assert(ones.size == 1) 162 | assert(twos.size == 1) 163 | } 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /src/test/scala/better/files/CommonSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.concurrent.duration._ 4 | import scala.util.Properties._ 5 | 6 | import org.scalatest._ 7 | import org.scalatest.flatspec._ 8 | import org.scalatest.matchers.should.Matchers 9 | 10 | trait CommonSpec extends AnyFlatSpec with BeforeAndAfterEach with Matchers { 11 | def isLinux = osName.startsWith("Linux") 12 | 13 | val isCI = sys.env.get("CI").exists(_.toBoolean) 14 | 15 | val scalaVersion = versionNumberString 16 | 17 | def sleep(t: FiniteDuration = 2.second) = Thread.sleep(t.toMillis) 18 | } 19 | -------------------------------------------------------------------------------- /src/test/scala/better/files/DisposeSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.reflect.ClassTag 
4 | 5 | import org.scalatest.matchers.{Matcher, MatchResult} 6 | 7 | class DisposeSpec extends CommonSpec { 8 | // Test classes 9 | 10 | private class TestDisposable extends AutoCloseable { 11 | var closeCount = 0 12 | 13 | override def close(): Unit = 14 | closeCount += 1 15 | } 16 | 17 | private class TestDisposableThatThrows extends TestDisposable { 18 | override def close(): Unit = { 19 | super.close() 20 | throw new TestDisposeException 21 | } 22 | } 23 | 24 | private class TestDisposableThatThrowsFatal extends TestDisposable { 25 | override def close(): Unit = { 26 | super.close() 27 | throw new TestDisposeFatalException 28 | } 29 | } 30 | 31 | private class TestEvalException extends Exception 32 | private class TestDisposeException extends Exception 33 | private class TestDisposeFatalException extends InterruptedException 34 | 35 | /** see https://dotty.epfl.ch/docs/reference/dropped-features/nonlocal-returns.html */ 36 | private class NonLocalReturn[A](val value: A) extends Exception 37 | 38 | // Custom matchers 39 | private class HaveSuppressedMatcher(classes: Class[_ <: Throwable]*) extends Matcher[Throwable] { 40 | override def apply(left: Throwable): MatchResult = { 41 | MatchResult( 42 | (classes corresponds left.getSuppressed) { (clazz, suppressed) => 43 | clazz isInstance suppressed 44 | }, 45 | s"had suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}", 46 | s"had not suppressed exceptions of types ${classes.map(_.getSimpleName).mkString(", ")}" 47 | ) 48 | } 49 | } 50 | 51 | private def haveSuppressed[E <: Throwable](implicit ct: ClassTag[E]) = 52 | new HaveSuppressedMatcher(ct.runtimeClass.asInstanceOf[Class[_ <: Throwable]]) 53 | 54 | // Test body 55 | 56 | behavior of "managed resources" 57 | 58 | it should "map correctly" in { 59 | val t = new TestDisposable 60 | 61 | val result = for { 62 | _ <- t.autoClosed 63 | } yield { 64 | t.closeCount shouldBe 0 65 | "hello" 66 | } 67 | 68 | result.get() shouldBe "hello" 69 | t.closeCount shouldBe 1 70 | } 71 | 72 | it should "flatMap correctly" in { 73 | val t = new TestDisposable 74 | 75 | val result = (for { 76 | _ <- t.autoClosed 77 | v <- Iterator("one", "two", "three") 78 | } yield { 79 | t.closeCount shouldBe 0 80 | v 81 | }).toSeq 82 | 83 | result should contain inOrder ("one", "two", "three") 84 | t.closeCount shouldBe 1 85 | } 86 | 87 | it should "handle exceptions correctly" in { 88 | val t = new TestDisposable 89 | 90 | a[TestEvalException] should be thrownBy { 91 | for { 92 | _ <- t.autoClosed 93 | } { 94 | t.closeCount shouldBe 0 95 | throw new TestEvalException 96 | } 97 | } 98 | t.closeCount shouldBe 1 99 | 100 | var lastSeen = "" 101 | a[TestEvalException] should be thrownBy { 102 | for { 103 | _ <- t.autoClosed 104 | v <- Iterator("one", "two", "three") 105 | } { 106 | t.closeCount shouldBe 1 107 | lastSeen = v 108 | if (v == "two") throw new TestEvalException 109 | } 110 | } 111 | t.closeCount shouldBe 2 112 | lastSeen shouldBe "two" 113 | } 114 | 115 | it should "handle disposal exceptions correctly" in { 116 | // For some mysterious reason, thrownBy doesn't work here, in this specific test case. No clue why, despite spending an entire day trying to figure it out, 117 | // including repeatedly stepping through the innards of ScalaTest in a debugger. Catching the exception manually does work, though. 
118 | val messageNoException = "no exception was thrown" 119 | def messageWrongException(e: Throwable): String = 120 | s"an exception was thrown, but not a TestDisposeException; instead it's a ${e.getClass.getName}" 121 | 122 | val t = new TestDisposableThatThrows 123 | 124 | val e1 = 125 | try { 126 | for { 127 | _ <- t.autoClosed 128 | } { 129 | t.closeCount shouldBe 0 130 | } 131 | None 132 | } catch { 133 | case e: TestDisposeException => 134 | Some(e) 135 | } 136 | assert(e1.nonEmpty, messageNoException) 137 | e1 foreach { e1c => 138 | assert(e1c.isInstanceOf[TestDisposeException], messageWrongException(e1c)) 139 | } 140 | t.closeCount shouldBe 1 141 | 142 | var lastSeen = "" 143 | val e2 = 144 | try { 145 | val i = for { 146 | _ <- t.autoClosed 147 | v <- Iterator("one", "two", "three") 148 | } yield { 149 | t.closeCount shouldBe 1 150 | lastSeen = v 151 | v 152 | } 153 | while (i.hasNext) i.next() 154 | None 155 | } catch { 156 | case e: TestDisposeException => 157 | Some(e) 158 | } 159 | lastSeen shouldBe "three" 160 | assert(e2.nonEmpty, messageNoException) 161 | e2 foreach { e2c => 162 | assert(e2c.isInstanceOf[TestDisposeException], messageWrongException(e2c)) 163 | } 164 | t.closeCount shouldBe 2 165 | } 166 | 167 | it should "handle non-local returns correctly" in { 168 | val t = new TestDisposable 169 | 170 | def doTheThing(): String = 171 | try { 172 | for { 173 | _ <- t.autoClosed 174 | } { 175 | t.closeCount shouldBe 0 176 | throw new NonLocalReturn("hello") 177 | } 178 | "can't reach here" 179 | } catch { 180 | case ex: NonLocalReturn[_] => ex.value.asInstanceOf[String] 181 | } 182 | 183 | doTheThing() shouldBe "hello" 184 | t.closeCount shouldBe 1 185 | 186 | def doTheThings(): String = 187 | try { 188 | for { 189 | _ <- t.autoClosed 190 | v <- Iterator("one", "two", "three") 191 | } { 192 | t.closeCount shouldBe 1 193 | if (v == "two") throw new NonLocalReturn(v) 194 | } 195 | "can't reach here" 196 | } catch { 197 | case ex: NonLocalReturn[_] => ex.value.asInstanceOf[String] 198 | } 199 | 200 | doTheThings() shouldBe "two" 201 | t.closeCount shouldBe 2 202 | } 203 | 204 | it should "handle multiple exceptions correctly" in { 205 | val t = new TestDisposableThatThrows 206 | 207 | the[TestEvalException] thrownBy { 208 | for { 209 | _ <- t.autoClosed 210 | } { 211 | t.closeCount shouldBe 0 212 | throw new TestEvalException 213 | } 214 | } should haveSuppressed[TestDisposeException] 215 | t.closeCount shouldBe 1 216 | 217 | var lastSeen = "" 218 | the[TestEvalException] thrownBy { 219 | for { 220 | _ <- t.autoClosed 221 | v <- Iterator("one", "two", "three") 222 | } { 223 | t.closeCount shouldBe 1 224 | lastSeen = v 225 | if (v == "two") throw new TestEvalException 226 | } 227 | } should haveSuppressed[TestDisposeException] 228 | lastSeen shouldBe "two" 229 | t.closeCount shouldBe 2 230 | } 231 | 232 | it should "give fatal exceptions precedence" in { 233 | val t = new TestDisposableThatThrowsFatal 234 | 235 | the[TestDisposeFatalException] thrownBy { 236 | for { 237 | _ <- t.autoClosed 238 | } { 239 | t.closeCount shouldBe 0 240 | throw new TestEvalException 241 | } 242 | } should haveSuppressed[TestEvalException] 243 | t.closeCount shouldBe 1 244 | 245 | var lastSeen = "" 246 | the[TestDisposeFatalException] thrownBy { 247 | for { 248 | _ <- t.autoClosed 249 | v <- Iterator("one", "two", "three") 250 | } { 251 | t.closeCount shouldBe 1 252 | lastSeen = v 253 | if (v == "two") throw new TestEvalException 254 | } 255 | } should haveSuppressed[TestEvalException] 256 | 
t.closeCount shouldBe 2 257 | lastSeen shouldBe "two" 258 | } 259 | 260 | it should "support for-comprehension" in { 261 | val data = List( 262 | List("key", "value"), 263 | List("hello", "0"), 264 | List("world", "1") 265 | ).map(_.mkString(",")) 266 | 267 | File.usingTemporaryFile() { f => 268 | for { 269 | pw <- f.printWriter() 270 | // TODO: Following couple of lines fails here https://travis-ci.com/github/pathikrit/better-files/jobs/500762452 271 | // _ :: rows = data 272 | // row <- rows 273 | row <- data.tail 274 | } pw.println(row) 275 | 276 | val expected = data.tail 277 | 278 | assert(f.contentAsString() === expected.mkString("", "\n", "\n")) 279 | 280 | val actual = for { 281 | reader <- f.bufferedReader() 282 | line <- reader.lines().toAutoClosedIterator.toList 283 | } yield line 284 | 285 | assert(actual.toSeq === expected) 286 | } 287 | } 288 | 289 | it should "handle multiple managed resources" in { 290 | var log = List.empty[String] 291 | 292 | def dummyClosable(msg: String): AutoCloseable = 293 | new AutoCloseable { override def close() = log = msg :: log } 294 | 295 | for { 296 | _ <- dummyClosable("outer").autoClosed 297 | _ <- dummyClosable("inner").autoClosed 298 | } () 299 | 300 | assert(log === "outer" :: "inner" :: Nil) 301 | } 302 | 303 | it should "handle multiple managed resources in a flatmap" in { 304 | var log = List.empty[String] 305 | 306 | def dummyClosable(msg: String): AutoCloseable = 307 | new AutoCloseable { override def close() = log = msg :: log } 308 | 309 | val x = for { 310 | _ <- dummyClosable("outer").autoClosed 311 | _ <- dummyClosable("inner").autoClosed 312 | } yield 8 313 | 314 | assert(log.isEmpty) 315 | assert(x.get() === 8) 316 | assert(log === "outer" :: "inner" :: Nil) 317 | } 318 | 319 | it should "handle multiple operations on managed resources" in { 320 | var log = List.empty[String] 321 | 322 | def dummyClosable(msg: String): AutoCloseable = 323 | new AutoCloseable { override def close() = log = msg :: log } 324 | 325 | def doSomething(c1: AutoCloseable, c2: AutoCloseable): Unit = println(s"$c1 $c2") 326 | 327 | for { 328 | t1 <- dummyClosable("outer").autoClosed 329 | t2 <- dummyClosable("inner").autoClosed 330 | } doSomething(t1, t2) 331 | 332 | assert(log === "outer" :: "inner" :: Nil) 333 | } 334 | } 335 | -------------------------------------------------------------------------------- /src/test/scala/better/files/FileMonitorSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.concurrent.ExecutionContext.Implicits.global 4 | import scala.concurrent.duration._ 5 | import scala.language.postfixOps 6 | 7 | class FileMonitorSpec extends CommonSpec { 8 | "file watcher" should "watch single files" in { 9 | assume(isCI) 10 | val file = File.newTemporaryFile(suffix = ".txt").writeText("Hello world") 11 | 12 | var log = List.empty[String] 13 | def output(msg: String) = 14 | synchronized { 15 | println(msg) 16 | log = msg :: log 17 | } 18 | 19 | /** ************************************************************************ 20 | */ 21 | 22 | val watcher = new FileMonitor(file) { 23 | override def onCreate(file: File, count: Int) = output(s"$file got created $count time(s)") 24 | override def onModify(file: File, count: Int) = output(s"$file got modified $count time(s)") 25 | override def onDelete(file: File, count: Int) = output(s"$file got deleted $count time(s)") 26 | } 27 | watcher.start() 28 | 29 | /** 
*********************************************************************** 30 | */ 31 | 32 | sleep(5 seconds) 33 | file.writeText("hello world"); sleep() 34 | file.clear(); sleep() 35 | file.writeText("howdy"); sleep() 36 | file.delete(); sleep() 37 | sleep(5 seconds) 38 | val sibling = (file.parent / "t1.txt").createIfNotExists(); sleep() 39 | sibling.writeText("hello world"); sleep() 40 | sleep(20 seconds) 41 | 42 | log.size should be >= 2 43 | log.exists(_ contains sibling.name) shouldBe false 44 | log.forall(_ contains file.name) shouldBe true 45 | } 46 | 47 | ignore should "watch directories to configurable depth" in { 48 | assume(isCI) 49 | val dir = File.newTemporaryDirectory() 50 | (dir / "a" / "b" / "c" / "d" / "e").createDirectories() 51 | var log = List.empty[String] 52 | def output(msg: String) = 53 | synchronized { 54 | log = msg :: log 55 | } 56 | 57 | val watcher = new FileMonitor(dir, maxDepth = 2) { 58 | override def onCreate(file: File, count: Int) = output(s"Create happened on ${file.name} $count times") 59 | } 60 | watcher.start() 61 | 62 | sleep(5 seconds) 63 | (dir / "a" / "b" / "t1").touch().writeText("hello world"); sleep() 64 | (dir / "a" / "b" / "c" / "d" / "t1").touch().writeText("hello world"); sleep() 65 | sleep(10 seconds) 66 | 67 | withClue(log) { 68 | log.size shouldEqual 1 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/test/scala/better/files/FileSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.nio.file.{FileAlreadyExistsException, Files => JFiles, FileSystems} 4 | import java.nio.file.AccessDeniedException 5 | 6 | import scala.collection.compat.immutable.LazyList 7 | import scala.language.postfixOps 8 | import scala.util.{Properties, Try} 9 | 10 | import better.files.Dsl._ 11 | import better.files.File.{home, root} 12 | 13 | class FileSpec extends CommonSpec { 14 | 15 | /** try to cope with windows, which will return e.g. c:\ as root */ 16 | val rootStr = FileSystems.getDefault.getRootDirectories.iterator().next().toString 17 | import java.io.File.{separator, separatorChar} 18 | 19 | /** Helper for unix -> windows path references (as strings). 
20 | * 21 | * @param path as unix path 22 | * @return path in native format 23 | */ 24 | def unixToNative(path: String): String = { 25 | if (Properties.isWin) { 26 | path 27 | .replaceFirst("^/", rootStr.replace("\\", "\\\\")) // we must escape '\' in C:\ 28 | .replace("/", separator) 29 | } else { 30 | path 31 | } 32 | } 33 | 34 | var testRoot: File = _ 35 | var fa: File = _ 36 | var a1: File = _ 37 | var a2: File = _ 38 | var t1: File = _ 39 | var t2: File = _ 40 | var t3: File = _ 41 | var fb: File = _ 42 | var b1: File = _ 43 | var b2: File = _ 44 | 45 | /** Setup the following directory structure under root 46 | * /a 47 | * /a1 48 | * /a2 49 | * a21.txt 50 | * a22.txt 51 | * /b 52 | * b1/ --> ../a1 53 | * b2.txt --> ../a2/a22.txt 54 | */ 55 | override def beforeEach() = { 56 | testRoot = File.newTemporaryDirectory("better-files") 57 | fa = testRoot / "a" 58 | a1 = testRoot / "a" / "a1" 59 | a2 = testRoot / "a" / "a2" 60 | t1 = testRoot / "a" / "a1" / "t1.txt" 61 | t2 = testRoot / "a" / "a1" / "t2.txt" 62 | t3 = testRoot / "a" / "a1" / "t3.scala.txt" 63 | fb = testRoot / "b" 64 | b1 = testRoot / "b" / "b1" 65 | b2 = testRoot / Symbol("b") / "b2.txt" 66 | Seq(a1, a2, fb) foreach mkdirs 67 | Seq(t1, t2) foreach touch 68 | } 69 | 70 | override def afterEach() = 71 | rm(testRoot) 72 | 73 | override def withFixture(test: NoArgTest) = { 74 | // val before = File.numberOfOpenFileDescriptors() 75 | val result = super.withFixture(test) 76 | // val after = File.numberOfOpenFileDescriptors() 77 | // assert(before == after, s"Resource leakage detected in $test") 78 | result 79 | } 80 | 81 | "files" can "be instantiated" in { 82 | import java.io.{File => JFile} 83 | 84 | val f = File("/User/johndoe/Documents") // using constructor 85 | val f1: File = file"/User/johndoe/Documents" // using string interpolator 86 | val f2: File = "/User/johndoe/Documents".toFile // convert a string path to a file 87 | val f3: File = new JFile("/User/johndoe/Documents").toScala // convert a Java file to Scala 88 | val f4: File = root / "User" / "johndoe" / "Documents" // using root helper to start from root 89 | // val f5: File = `~` / "Documents" // also equivalent to `home / "Documents"` 90 | val f6: File = "/User" / "johndoe" / "Documents" // using file separator DSL 91 | val f7: File = home / "Documents" / "presentations" / `..` // Use `..` to navigate up to parent 92 | val f8: File = root / "User" / "johndoe" / "Documents" / `.` 93 | val f9: File = File(f.uri) 94 | val f10: File = File("../a") // using a relative path 95 | Seq(f, f1, f2, f3, f4, /* f5,*/ f6, f7, f8, f9, f10) foreach { f => 96 | f.pathAsString should not include ".." 
97 | } 98 | 99 | root.toString shouldEqual rootStr 100 | home.toString.count(_ == separatorChar) should be > 1 101 | (root / "usr" / "johndoe" / "docs").toString shouldEqual unixToNative("/usr/johndoe/docs") 102 | Seq(f, f1, f2, f4, /*f5,*/ f6, f8, f9).map(_.toString).toSet shouldBe Set(f.toString) 103 | } 104 | 105 | it can "be instantiated with anchor" in { 106 | // testRoot / a / a1 / t1.txt 107 | val basedir = a1 108 | File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) 109 | File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) 110 | 111 | File(basedir, "rel/path/to/loc").toString should be(unixToNative(basedir.toString + "/rel/path/to/loc")) 112 | File(basedir, "../rel/path/to/loc").toString should be(unixToNative(fa.toString + "/rel/path/to/loc")) 113 | File(basedir, "../", "rel", "path", "to", "loc").toString should be(unixToNative(fa.toString + "/rel/path/to/loc")) 114 | 115 | val baseref = t1 116 | File(baseref, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) 117 | File(baseref, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) 118 | 119 | File(baseref, "rel/path/to/loc").toString should be(unixToNative(a1.toString + "/rel/path/to/loc")) 120 | File(baseref, "../rel/path/to/loc").toString should be(unixToNative(fa.toString + "/rel/path/to/loc")) 121 | File(basedir, "../", "rel", "path", "to", "loc").toString should be(unixToNative(fa.toString + "/rel/path/to/loc")) 122 | } 123 | 124 | it can "be instantiated with non-existing abs anchor" in { 125 | val anchorStr = "/abs/to/nowhere" 126 | val anchorStr_a = anchorStr + "/a" 127 | val basedir = File(anchorStr_a + "/last") 128 | 129 | File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) 130 | File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) 131 | 132 | File(basedir, "rel/path/to/loc").toString should be(unixToNative(anchorStr_a + "/rel/path/to/loc")) 133 | File(basedir, "../rel/path/to/loc").toString should be(unixToNative(anchorStr + "/rel/path/to/loc")) 134 | File(basedir, "../", "rel", "path", "to", "loc").toString should be(unixToNative(anchorStr + "/rel/path/to/loc")) 135 | } 136 | 137 | it can "be instantiated with non-existing relative anchor" in { 138 | val relAnchor = File("rel/anc/b/last") 139 | val basedir = relAnchor 140 | 141 | File(basedir, "/abs/path/to/loc").toString should be(unixToNative("/abs/path/to/loc")) 142 | File(basedir, "/abs", "path", "to", "loc").toString should be(unixToNative("/abs/path/to/loc")) 143 | 144 | File(basedir, "rel/path/to/loc").toString should be(unixToNative(File("rel/anc/b").toString + "/rel/path/to/loc")) 145 | File(basedir, "../rel/path/to/loc").toString should be(unixToNative(File("rel/anc").toString + "/rel/path/to/loc")) 146 | File(basedir, "../", "rel", "path", "to", "loc").toString should be( 147 | unixToNative(File("rel/anc").toString + "/rel/path/to/loc") 148 | ) 149 | } 150 | 151 | it should "do basic I/O" in { 152 | t1 < "hello" 153 | t1.contentAsString() shouldEqual "hello" 154 | t1.appendLine() << "world" 155 | (t1 !) 
shouldEqual String.format("hello%nworld%n") 156 | t1.chars().to(LazyList) should contain theSameElementsInOrderAs String.format("hello%nworld%n").toSeq 157 | "foo" `>:` t1 158 | "bar" >>: t1 159 | t1.contentAsString() shouldEqual String.format("foobar%n") 160 | t1.appendLines("hello", "world") 161 | t1.contentAsString() shouldEqual String.format("foobar%nhello%nworld%n") 162 | t2.writeText("hello").appendText("world").contentAsString() shouldEqual "helloworld" 163 | 164 | (testRoot / "diary") 165 | .createFileIfNotExists() 166 | .appendLine() 167 | .appendLines("My name is", "Inigo Montoya") 168 | .printLines(Iterator("x", 1)) 169 | .lines() 170 | .toSeq should contain theSameElementsInOrderAs Seq("", "My name is", "Inigo Montoya", "x", "1") 171 | } 172 | 173 | it should "handle BOM" in { 174 | val lines = Seq("Line 1", "Line 2") 175 | val expectedContent = lines.mkString(start = "", sep = "\n", end = "\n") 176 | File.temporaryFile() foreach { file => 177 | file.appendLines(lines, charset = UnicodeCharset("UTF-8", writeByteOrderMarkers = true)) 178 | file.contentAsString(charset = "UTF-8") should not equal expectedContent 179 | file.contentAsString() shouldEqual expectedContent 180 | } 181 | } 182 | 183 | // it should "glob" in { 184 | // assume(isCI) 185 | // a1.glob("*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") 186 | // //a1.glob("*.txt").map(_.name).toSeq shouldEqual Seq("t1.txt", "t2.txt") 187 | // testRoot.glob("**/*.txt").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") 188 | // val path = testRoot.path.toString.ensuring(testRoot.path.isAbsolute) 189 | // File(path).glob("**/*.{txt}").map(_.name).toSeq.sorted shouldEqual Seq("t1.txt", "t2.txt") 190 | // ("benchmarks"/"src").glob("**/*.{scala,java}").map(_.name).toSeq.sorted shouldEqual Seq("ArrayBufferScanner.java", "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") 191 | // ("benchmarks"/"src").glob("**/*.{scala}").map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") 192 | // ("benchmarks"/"src").glob("**/*.scala").map(_.name).toSeq.sorted shouldEqual Seq("Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") 193 | // ("benchmarks"/"src").listRecursively.filter(_.extension.contains(".scala")).map(_.name).toSeq.sorted shouldEqual Seq( "Benchmark.scala", "EncodingBenchmark.scala", "ScannerBenchmark.scala", "Scanners.scala") 194 | // ls("core"/"src"/"test") should have length 1 195 | // ("core"/"src"/"test").walk(maxDepth = 1) should have length 2 196 | // ("core"/"src"/"test").walk(maxDepth = 0) should have length 1 197 | // ("core"/"src"/"test").walk() should have length (("core"/"src"/"test").listRecursively.length + 1L) 198 | // ls_r("core"/"src"/"test") should have length 8 199 | // } 200 | 201 | it should "support names/extensions" in { 202 | assert(File("zzz").changeExtensionTo("ddd").name === "zzz.ddd") 203 | assert(File("zzz").changeExtensionTo(".ddd").name === "zzz.ddd") 204 | fa.extension() shouldBe None 205 | fa.nameWithoutExtension shouldBe fa.name 206 | t1.extension() shouldBe Some(".txt") 207 | t1.extension(includeDot = false) shouldBe Some("txt") 208 | t3.extension() shouldBe Some(".txt") 209 | t3.extension(includeAll = true) shouldBe Some(".scala.txt") 210 | t3.extension(includeDot = false, includeAll = true) shouldBe Some("scala.txt") 211 | t1.name shouldBe "t1.txt" 212 | t1.nameWithoutExtension shouldBe "t1" 213 | 
t1.changeExtensionTo(".md").name shouldBe "t1.md" 214 | (t1 < "hello world").changeExtensionTo(".txt").name shouldBe "t1.txt" 215 | // t1.contentType shouldBe Some("text/plain") 216 | ("src" / "test").toString should include("better-files") 217 | (t1.contentAsString() == t1.toString) shouldBe false 218 | t1.root shouldEqual fa.root 219 | file"/tmp/foo.scala.html".extension() shouldBe Some(".html") 220 | file"/tmp/foo.scala.html".nameWithoutExtension shouldBe "foo" 221 | file"/tmp/foo.scala.html".nameWithoutExtension(includeAll = false) shouldBe "foo.scala" 222 | root.name shouldBe "" 223 | } 224 | 225 | it should "hide/unhide" in { 226 | t1.isHidden shouldBe false 227 | } 228 | 229 | it should "support parent/child" in { 230 | fa isChildOf testRoot shouldBe true 231 | testRoot isChildOf root shouldBe true 232 | root isChildOf root shouldBe false 233 | fa isChildOf fa shouldBe false 234 | fa isParentOf fa shouldBe false 235 | b2 isChildOf b2 shouldBe false 236 | b2 isChildOf b2.parent shouldBe true 237 | root.parent shouldBe null 238 | } 239 | 240 | it should "support siblings" in { 241 | (file"/tmp/foo.txt" sibling "bar.txt").pathAsString shouldBe unixToNative("/tmp/bar.txt") 242 | fa.siblings.toList.map(_.name) shouldBe List("b") 243 | fb isSiblingOf fa shouldBe true 244 | } 245 | 246 | it should "support sorting" in { 247 | testRoot.list.toSeq.sorted(File.Order.byName) should not be empty 248 | testRoot.list.toSeq.max(File.Order.bySize).isEmpty() shouldBe false 249 | Seq(fa, fb).contains(testRoot.list.toSeq.min(File.Order.byDepth)) shouldBe true 250 | sleep() 251 | t2.appendLine("modified!") 252 | a1.list.toSeq.min(File.Order.byModificationTime) shouldBe t1 253 | testRoot.list.toSeq.sorted(File.Order.byDirectoriesFirst) should not be empty 254 | } 255 | 256 | it must "have .size" in { 257 | fb.isEmpty() shouldBe true 258 | t1.size() shouldBe 0 259 | t1.writeText("Hello World") 260 | t1.size() should be > 0L 261 | testRoot.size() should be > (t1.size() + t2.size()) 262 | } 263 | 264 | // NOTE: Commented out because it's no longer needed. Expected to be vetted and removed by the project maintainer. 
265 | // it should "set/unset permissions" in { 266 | // assume(isCI) 267 | // import java.nio.file.attribute.PosixFilePermission 268 | // //an[UnsupportedOperationException] should be thrownBy t1.dosAttributes 269 | // t1.permissions()(PosixFilePermission.OWNER_EXECUTE) shouldBe false 270 | // 271 | // chmod_+(PosixFilePermission.OWNER_EXECUTE, t1) 272 | // t1.testPermission(PosixFilePermission.OWNER_EXECUTE) shouldBe true 273 | // t1.permissionsAsString shouldBe "rwxrw-r--" 274 | // 275 | // chmod_-(PosixFilePermission.OWNER_EXECUTE, t1) 276 | // t1.isOwnerExecutable shouldBe false 277 | // t1.permissionsAsString shouldBe "rw-rw-r--" 278 | // } 279 | 280 | it should "support equality" in { 281 | import better.files.Dsl._ 282 | fa shouldEqual (testRoot / "a") 283 | fa shouldNot equal(testRoot / "b") 284 | val c1 = fa.md5() 285 | fa.md5() shouldEqual c1 286 | t1 < "hello" 287 | t2 < "hello" 288 | (t1 == t2) shouldBe false 289 | (t1 === t2) shouldBe true 290 | t2 < "hello world" 291 | (t1 == t2) shouldBe false 292 | (t1 === t2) shouldBe false 293 | fa.md5() should not equal c1 294 | } 295 | 296 | it should "create if not exist directory structures" in { 297 | File.usingTemporaryDirectory() { dir => 298 | val file = dir / "a" / "b" / "c.txt" 299 | assert(file.notExists()) 300 | assert(file.parent.notExists()) 301 | file.createIfNotExists(createParents = true) 302 | assert(file.exists()) 303 | assert(file.parent.exists()) 304 | file.writeText("Hello world") 305 | assert(file.contentAsString() === "Hello world") 306 | } 307 | } 308 | 309 | it should "treat symlinks transparently in convenience methods" in { 310 | File.usingTemporaryDirectory() { dir => 311 | val realDir = dir / "a" 312 | val dirSymlink = dir / "b" 313 | realDir.createDirectory() 314 | JFiles.createSymbolicLink(dirSymlink.path, realDir.path) 315 | dirSymlink.createDirectories() 316 | a[FileAlreadyExistsException] should be thrownBy dirSymlink.createDirectories(linkOptions = File.LinkOptions.noFollow) 317 | /*a[FileAlreadyExistsException] shouldNot be thrownBy*/ 318 | dirSymlink.createDirectories() 319 | } 320 | } 321 | 322 | it should "support chown/chgrp" in { 323 | fa.ownerName() should not be empty 324 | fa.groupName() should not be empty 325 | a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chown("hitler", fa) 326 | // a[java.nio.file.FileSystemException] should be thrownBy chown("root", fa) 327 | a[java.nio.file.attribute.UserPrincipalNotFoundException] should be thrownBy chgrp("cool", fa) 328 | // a[java.nio.file.FileSystemException] should be thrownBy chown("admin", fa) 329 | // fa.chown("nobody").chgrp("nobody") 330 | stat(t1) shouldBe a[java.nio.file.attribute.PosixFileAttributes] 331 | } 332 | 333 | it should "detect file locks" in { 334 | File.temporaryFile() foreach { file => 335 | def lockInfo() = file.isReadLocked() -> file.isWriteLocked() 336 | // TODO: Why is file.isReadLocked() should be false? 
337 | lockInfo() shouldBe (true -> false) 338 | val channel = file.newRandomAccess(File.RandomAccessMode.readWrite).getChannel 339 | val lock = channel.tryLock() 340 | lockInfo() shouldBe (true -> true) 341 | lock.release() 342 | channel.close() 343 | lockInfo() shouldBe (true -> false) 344 | } 345 | } 346 | 347 | it should "support ln/cp/mv" in { 348 | val magicWord = "Hello World" 349 | t1 writeText magicWord 350 | // link 351 | // to relative target 352 | val b0 = b1.sibling("b0") 353 | java.nio.file.Files.createSymbolicLink(b0.path, java.nio.file.Paths.get("b1")) 354 | b0.symbolicLink should not be empty 355 | b0.symbolicLink.get.path.isAbsolute shouldBe false 356 | // to absolute target 357 | b1.linkTo(a1, symbolic = true) 358 | ln_s(b2, t2) 359 | (b1 / "t1.txt").contentAsString() shouldEqual magicWord 360 | // copy 361 | b2.contentAsString() shouldBe empty 362 | t1.md5() should not equal t2.md5() 363 | a[java.nio.file.FileAlreadyExistsException] should be thrownBy (t1 copyTo t2) 364 | t1.copyTo(t2, overwrite = true) 365 | t1.exists() shouldBe true 366 | t1.md5() shouldEqual t2.md5() 367 | b2.contentAsString() shouldEqual magicWord 368 | // rename 369 | t2.name shouldBe "t2.txt" 370 | t2.exists() shouldBe true 371 | val t3 = t2 renameTo "t3.txt" 372 | t3.name shouldBe "t3.txt" 373 | t2.exists() shouldBe false 374 | t3.exists() shouldBe true 375 | // move 376 | t3 moveTo t2 377 | t2.exists() shouldBe true 378 | t3.exists() shouldBe false 379 | } 380 | 381 | it should "support creating hard links with ln" in { 382 | val magicWord = "Hello World" 383 | t1 writeText magicWord 384 | t1.linkTo(t3, symbolic = false) 385 | (a1 / "t3.scala.txt").contentAsString() shouldEqual magicWord 386 | } 387 | 388 | it should "support custom charset" in { 389 | import java.nio.charset.Charset 390 | t1.writeText("你好世界", charset = "UTF8") 391 | t1.contentAsString(charset = "ISO-8859-1") should not equal "你好世界" 392 | t1.contentAsString(charset = "UTF8") shouldEqual "你好世界" 393 | val c1 = md5(t1) 394 | val c2 = t1.overwrite("你好世界", File.OpenOptions.default, Charset.forName("ISO-8859-1")).md5() 395 | c1 should not equal c2 396 | c2 shouldEqual t1.checksum("md5") 397 | } 398 | 399 | it should "read chinese" in { 400 | val lines = "src/test/resources/better/files/issues-326.txt".toFile.lines().toSeq 401 | assert(lines.length > 20) 402 | } 403 | 404 | it should "support hashing algos" in { 405 | val charset = java.nio.charset.StandardCharsets.UTF_8 406 | t1.writeText("", charset = charset) 407 | md5(t1) shouldEqual "D41D8CD98F00B204E9800998ECF8427E" 408 | sha1(t1) shouldEqual "DA39A3EE5E6B4B0D3255BFEF95601890AFD80709" 409 | sha256(t1) shouldEqual "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" 410 | sha512(t1) shouldEqual 411 | "CF83E1357EEFB8BDF1542850D66D8007D620E4050B5715DC83F4A921D36CE9CE47D0D13C5D85F2B0FF8318D2877EEC2F63B931BD47417A81A538327AF927DA3E" 412 | } 413 | 414 | it should "compute correct checksum for non-zero length string" in { 415 | val charset = java.nio.charset.StandardCharsets.UTF_8 416 | t1.writeText("test", charset = charset) 417 | md5(t1) shouldEqual "098F6BCD4621D373CADE4E832627B4F6" 418 | sha1(t1) shouldEqual "A94A8FE5CCB19BA61C4C0873D391E987982FBBD3" 419 | sha256(t1) shouldEqual "9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08" 420 | sha512(t1) shouldEqual 421 | "EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF" 422 | } 423 | 424 | it should "copy" in { 425 | (fb / "t3" / 
"t4.txt").createIfNotExists(createParents = true).writeText("Hello World") 426 | (fb / "t5" / "t5.txt").createIfNotExists(createParents = true).writeText("Scala Awesome") 427 | (fb / "t5" / "t3").notExists() shouldBe true 428 | cp(fb / "t3", fb / "t5") 429 | (fb / "t3").exists() shouldBe true 430 | (fb / "t5" / "t3").exists() shouldBe true 431 | (fb / "t5" / "t5.txt").contentAsString() shouldEqual "Scala Awesome" 432 | assert((fb / "t3") isSameContentAs (fb / "t5" / "t3")) 433 | } 434 | 435 | it should "move" in { 436 | (fb / "t3" / "t4.txt").createIfNotExists(createParents = true).writeText("Hello World") 437 | mv(fb / "t3", fb / "t5") 438 | (fb / "t5" / "t4.txt").contentAsString() shouldEqual "Hello World" 439 | (fb / "t3").notExists() shouldBe true 440 | } 441 | 442 | it should "delete" in { 443 | fb.exists() shouldBe true 444 | fb.delete() 445 | fb.exists() shouldBe false 446 | } 447 | 448 | it should "touch" in { 449 | (fb / "z1").exists() shouldBe false 450 | (fb / "z1").isEmpty() shouldBe true 451 | (fb / "z1").touch() 452 | (fb / "z1").exists() shouldBe true 453 | (fb / "z1").isEmpty() shouldBe true 454 | Thread.sleep(1000) 455 | (fb / "z1").lastModifiedTime().getEpochSecond should be < (fb / "z1").touch().lastModifiedTime().getEpochSecond 456 | } 457 | 458 | it should "md5" in { 459 | val h1 = t1.hashCode 460 | val actual = (t1 < "hello world").md5() 461 | val h2 = t1.hashCode 462 | h1 shouldEqual h2 463 | import scala.sys.process._ 464 | val expected = Try(s"md5sum ${t1.path}" !!) getOrElse (s"md5 ${t1.path}" !!) 465 | expected.toUpperCase should include(actual) 466 | actual should not equal h1 467 | actual shouldEqual t1.newInputStream().withMessageDigest("md5").hexDigest() 468 | } 469 | 470 | it should "support file in/out" in { 471 | t1 < "hello world" 472 | for { 473 | in <- t1.inputStream() 474 | out <- t2.outputStream() 475 | } in.pipeTo(out) 476 | t2.contentAsString() shouldEqual "hello world" 477 | t2.newInputStream().asString() shouldEqual "hello world" 478 | } 479 | 480 | it should "zip/unzip directories" in { 481 | assume(Properties.javaVersion.startsWith("1.8")) 482 | t1.writeText("hello world") 483 | val zipFile = testRoot.zip() 484 | zipFile.size() should be > 100L 485 | zipFile.name should endWith(".zip") 486 | 487 | def test(output: File) = { 488 | import better.files.Dsl._ 489 | (output / "a" / "a1" / "t1.txt").contentAsString() shouldEqual "hello world" 490 | output === testRoot shouldBe true 491 | (output / "a" / "a1" / "t1.txt").overwrite("hello") 492 | (output !== testRoot) shouldBe true 493 | } 494 | 495 | test(zipFile.unzip()) 496 | test(zipFile.streamedUnzip()) 497 | } 498 | 499 | it should "zip/unzip single files" in { 500 | assume(Properties.javaVersion.startsWith("1.8")) 501 | t1.writeText("hello world") 502 | val zipFile = t1.zip() 503 | zipFile.size() should be > 100L 504 | zipFile.name should endWith(".zip") 505 | val destination = unzip(zipFile)(File.newTemporaryDirectory()) 506 | (destination / "t1.txt").contentAsString() shouldEqual "hello world" 507 | } 508 | 509 | it should "zip/unzip multiple files" in { 510 | File.usingTemporaryDirectory() { dir => 511 | val f1 = (dir / Symbol("f1")).touch().appendLines("Line 1", "Line 2") 512 | val f2 = (dir / Symbol("f2")).touch().appendLines("Line 3", "Line 4") 513 | val zipFile = (dir / "f.zip").zipIn(Iterator(f1, f2)) 514 | val lines = zipFile.newZipInputStream().foldMap(_.lines().toSeq).flatten 515 | lines.toSeq shouldEqual Seq("Line 1", "Line 2", "Line 3", "Line 4") 516 | } 517 | } 518 | 519 | it should 
"exclude destination zip when it's under directory to be zipped" in { 520 | File.usingTemporaryDirectory() { dir => 521 | (dir / Symbol("f1")).touch().appendLines("Line 1", "Line 2") 522 | (dir / Symbol("f2")).touch().appendLines("Line 3", "Line 4") 523 | val zipFile = (dir / "f.zip") 524 | val zipped = dir.zipTo(zipFile.path) 525 | zipped.unzipTo().listRecursively().toList.map(_.name).forall(!_.contains("zip")) shouldBe true 526 | } 527 | } 528 | 529 | it should "handle backslashes in zip entry name" in { 530 | val list = File("src/test/resources/better/files/issues-262.zip") 531 | .unzipTo() 532 | .listRecursively() 533 | .toList 534 | assert(list.length === 3) 535 | } 536 | 537 | it should "unzip safely by default" in { 538 | val list = File("src/test/resources/better/files/issue-624.zip") 539 | .unzipTo() 540 | .listRecursively() 541 | .toList 542 | // Three total entries in the zip file: 1 without directory traversal characters and 2 with. Default secure unzip should only unzip the entry without directory traversal characters. 543 | assert(list.length === 1) 544 | } 545 | 546 | it should "unzip unsafely when safe unzip is disabled" in { 547 | // Unsafe unzip with a zipslip attack may result in OS access denied exceptions. If these occur, the test should still pass. 548 | var destList = List.empty[File] 549 | var list = List.empty[File] 550 | try { 551 | // create the directory structure safe for issue 624 zip file 552 | val toplevel = File.newTemporaryDirectory("issue-624") 553 | val dest = (toplevel / "a" / "b").createDirectories() 554 | 555 | destList = File("src/test/resources/better/files/issue-624.zip") 556 | .unzipTo(destination = dest, safeUnzip = false) 557 | .listRecursively() 558 | .toList 559 | list = toplevel 560 | .listRecursively() 561 | .filterNot(_.isDirectory()) 562 | .toList 563 | } catch { 564 | // Unsafe unzip should try to extract all entries, might result in access denied exception from OS. 565 | case exception: Throwable => assert(exception.isInstanceOf[AccessDeniedException]) 566 | } 567 | // Three total entries in the zip file: 1 without directory traversal characters and 2 with. 
568 | assert(destList.length === 1) 569 | assert(list.length === 3) 570 | } 571 | 572 | it should "ungzip" in { 573 | val data = Seq("hello", "world") 574 | for { 575 | pw <- (testRoot / "test.gz").newOutputStream().asGzipOutputStream().printWriter().autoClosed 576 | line <- data 577 | } pw.println(line) 578 | 579 | (testRoot / "test.gz").inputStream().flatMap(_.asGzipInputStream().lines()).toSeq shouldEqual data 580 | } 581 | 582 | it should "gzip" in { 583 | val actual = t1 584 | .writeText("hello world") 585 | .gzipTo() 586 | .unGzipTo() 587 | .contentAsString() 588 | 589 | assert(actual === "hello world") 590 | 591 | t1.clear().newGzipOutputStream().writeAndClose("hello world2") 592 | assert(t1.newGzipInputStream().asString() === "hello world2") 593 | } 594 | 595 | it should "read bytebuffers" in { 596 | t1.writeText("hello world") 597 | for { 598 | fileChannel <- t1.newFileChannel().autoClosed 599 | } fileChannel.toMappedByteBuffer.remaining() shouldEqual t1.bytes.length 600 | 601 | (t2 writeBytes t1.bytes).contentAsString() shouldEqual t1.contentAsString() 602 | 603 | t1.newInputStream().bytes.toArray shouldEqual t1.newInputStream().byteArray 604 | } 605 | 606 | it should "convert readers to inputstreams and writers to outputstreams" in { 607 | File.temporaryFile() foreach { f => 608 | val text = List.fill(10000)("hello world") 609 | for { 610 | writer <- f.bufferedWriter() 611 | out <- writer.outputstream().autoClosed 612 | } out.write(text.mkString("\n").getBytes) 613 | val t = f.bufferedReader().flatMap(_.toInputStream().lines()) 614 | t.toList shouldEqual text 615 | } 616 | } 617 | 618 | it should "serialize/deserialize" in { 619 | assume( 620 | scalaVersion.startsWith("2.12") || scalaVersion.startsWith("2.13") 621 | ) // inline classes not serializable in Scala 2.11 because of https://github.com/scala/bug/issues/10233 622 | class Person(val name: String, val age: Int) extends Serializable 623 | val p1 = new Person("Chris", 34) 624 | 625 | File.temporaryFile() foreach { f => // serialization round-trip test 626 | assert(f.isEmpty()) 627 | f.writeSerialized(p1) 628 | assert(f.nonEmpty()) 629 | val p2: Person = f.readDeserialized[Person]() 630 | assert(p1.name === p2.name) 631 | assert(p1.age === p2.age) 632 | 633 | val p3 = f.inputStream().apply(_.asObjectInputStreamUsingClassLoader().deserialize[Person]) 634 | assert(p3.name === p2.name) 635 | assert(p3.age === p2.age) 636 | } 637 | } 638 | 639 | it should "serialize/deserialize primitives" in { 640 | assert(t1.writeSerialized(23).readDeserialized[Int]() === 23) 641 | } 642 | 643 | it should "count number of open file descriptors" in { 644 | val expected = java.lang.management.ManagementFactory.getOperatingSystemMXBean 645 | .asInstanceOf[com.sun.management.UnixOperatingSystemMXBean] 646 | .getOpenFileDescriptorCount 647 | assert((File.numberOfOpenFileDescriptors() - expected).abs <= 10) 648 | } 649 | } 650 | -------------------------------------------------------------------------------- /src/test/scala/better/files/GlobSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.io.File.separator 4 | 5 | import better.files.Dsl._ 6 | 7 | import org.scalatest.BeforeAndAfterAll 8 | 9 | class GlobSpec extends CommonSpec with BeforeAndAfterAll { 10 | var testDir: File = _ 11 | var globTree: File = _ 12 | var specialTree: File = _ 13 | 14 | var regexWildcardPath: File = _ 15 | var globWildcardPath: File = _ 16 | // 17 | // Test target for glob 18 | // 19 | 
// tests/ 20 | // ├── globtree 21 | // │   ├── a 22 | // │   │   ├── a2 23 | // │   │   │   ├── a2.txt 24 | // │   │   │   └── x.txt 25 | // │   │   ├── a.not 26 | // │   │   ├── a.txt 27 | // │   │   └── x.txt 28 | // │   ├── b 29 | // │   │   ├── a 30 | // │   │   │   └── ba.txt 31 | // │   │   └── b.txt 32 | // │   ├── c 33 | // │   │   ├── c.txt 34 | // │   │   └── x.txt 35 | // │   ├── empty 36 | // │   ├── link_to_a -> a 37 | // │   ├── one.txt 38 | // │   ├── readme.md 39 | // │   ├── three.txt 40 | // │   └── two.txt 41 | // └── special 42 | // ├── .* 43 | // │   └── a 44 | // │ └── a.txt 45 | // └── ** 46 | // └── a 47 | // └── a.txt 48 | // 49 | override def beforeAll() = { 50 | testDir = File.newTemporaryDirectory("glob-tests") 51 | globTree = testDir / "globtree" 52 | 53 | mkdir(globTree) 54 | val a = mkdir(globTree / "a") 55 | mkdir(globTree / "a" / "a2") 56 | touch(globTree / "a" / "a2" / "a2.txt") 57 | touch(globTree / "a" / "a2" / "x.txt") 58 | touch(globTree / "a" / "a.not") 59 | touch(globTree / "a" / "a.txt") 60 | touch(globTree / "a" / "x.txt") 61 | 62 | mkdir(globTree / "b") 63 | mkdir(globTree / "b" / "a") 64 | touch(globTree / "b" / "a" / "ba.txt") 65 | touch(globTree / "b" / "b.txt") 66 | 67 | mkdir(globTree / "c") 68 | touch(globTree / "c" / "c.txt") 69 | touch(globTree / "c" / "x.txt") 70 | 71 | mkdir(globTree / "empty") 72 | 73 | ln_s(globTree / "link_to_a", a) 74 | 75 | touch(globTree / "one.txt") 76 | touch(globTree / "two.txt") 77 | touch(globTree / "three.txt") 78 | touch(globTree / "readme.md") 79 | 80 | // Special target with path name components as wildcards 81 | specialTree = testDir / "special" 82 | 83 | // regex 84 | mkdir(specialTree) 85 | regexWildcardPath = mkdir(specialTree / ".*") 86 | mkdir(specialTree / ".*" / "a") 87 | touch(specialTree / ".*" / "a" / "a.txt") 88 | 89 | // glob 90 | globWildcardPath = mkdir(specialTree / "**") 91 | mkdir(specialTree / "**" / "a") 92 | touch(specialTree / "**" / "a" / "a.txt") 93 | 94 | () 95 | } 96 | 97 | override def afterAll() = 98 | rm(testDir) 99 | 100 | /** Helper in case something goes wrong... 101 | */ 102 | private def debugPaths(files: Seq[File]): String = { 103 | files 104 | .sortBy(_.path) 105 | .map(files => s"PATH: ${files.toString}") 106 | .mkString(s"SIZE: ${files.size}\n", "\n", "\n") 107 | } 108 | 109 | /** Verity if candidates are equal with references. 110 | * Does not accept empty sets, use assert(paths.isEmpty) for that. 111 | * 112 | * @param pathsIt candidates 113 | * @param refPaths references 114 | * @param baseDir basedir to for creating full path of references 115 | */ 116 | private def verify(pathsIt: Iterator[File], refPaths: Seq[String], baseDir: File) = { 117 | val paths = pathsIt.toSeq 118 | val refs = refPaths 119 | .map(refPath => baseDir / refPath) 120 | .sortBy(_.path) 121 | 122 | withClue("Result: " + debugPaths(paths) + "Reference: " + debugPaths(refs)) { 123 | assert(paths.length === refPaths.length) 124 | assert(paths.nonEmpty) 125 | import better.files.Dsl._ 126 | paths.sortBy(_.path).zip(refs).foreach({ case (path, refPath) => assert(path === refPath) }) 127 | } 128 | } 129 | 130 | "glob" should "match plain file (e.g. 'file.ext')" in { 131 | val refPaths = Seq( 132 | "one.txt" 133 | ) 134 | val paths = globTree.glob("one.txt") 135 | verify(paths, refPaths, globTree) 136 | } 137 | it should "match path without glob (e.g. 
'sub/dir/file.ext')" in { 138 | val refPaths = Seq( 139 | "a/a.txt" 140 | ) 141 | val paths = globTree.glob("a/a.txt") 142 | verify(paths, refPaths, globTree) 143 | } 144 | 145 | it should "match file-glob (e.g. '*.ext')" in { 146 | val refPaths = Seq( 147 | "one.txt", 148 | "two.txt", 149 | "three.txt" 150 | ) 151 | val paths = globTree.glob("*.txt") 152 | verify(paths, refPaths, globTree) 153 | assert(globTree.glob("*.txt", includePath = false, syntax = File.PathMatcherSyntax.glob).isEmpty) 154 | } 155 | 156 | it should "match fixed sub dir and file-glob (e.g. '**/subdir/*.ext')" in { 157 | // TODO: DOC: why top level 'a' is not matched 158 | val refPaths = List( 159 | "b/a/ba.txt" 160 | ) 161 | val paths = globTree.glob("**/a/*.txt") 162 | verify(paths, refPaths, globTree) 163 | } 164 | 165 | it should "use parent dir for matching (e.g. plain 'subdir/*.ext')" in { 166 | // e.g. check that b nor c are matched, nor b/a 167 | val refPaths = Seq( 168 | "a/a.txt", 169 | "a/x.txt" 170 | ) 171 | val paths = globTree.glob("a/*.txt") 172 | verify(paths, refPaths, globTree) 173 | } 174 | 175 | it should "match sub-directory glob with plain file (e.g. 'subdir/*/file.ext')" in { 176 | val refPaths = Seq( 177 | "a/x.txt", 178 | "c/x.txt" 179 | ) 180 | val paths = testDir.glob("globtree/*/x.txt") 181 | verify(paths, refPaths, globTree) 182 | } 183 | 184 | it should "match sub-directory glob with file-glob (e.g. 'subdir/*/*.ext')" in { 185 | val refPaths = Seq( 186 | "a/a.txt", 187 | "a/x.txt", 188 | "c/c.txt", 189 | "c/x.txt", 190 | "b/b.txt" 191 | ) 192 | val paths = testDir.glob("globtree/*/*.txt") 193 | verify(paths, refPaths, globTree) 194 | } 195 | 196 | it should "match deep sub-directory glob with plain file (e.g. 'subdir/**/file.ext')" in { 197 | val refPaths = Seq( 198 | "a/a2/x.txt", 199 | "a/x.txt", 200 | "c/x.txt" 201 | ) 202 | val p1s = globTree.glob("**/x.txt") 203 | verify(p1s, refPaths, globTree) 204 | 205 | val p2s = testDir.glob("globtree/**/x.txt") 206 | verify(p2s, refPaths, globTree) 207 | } 208 | 209 | it should "match deep sub-directory glob with file-glob (e.g. 'subdir/**/*.ext')" in { 210 | val refPaths = Seq( 211 | "a/a.txt", 212 | "a/x.txt", 213 | "a/a2/x.txt", 214 | "a/a2/a2.txt", 215 | "c/x.txt", 216 | "c/c.txt", 217 | "b/b.txt", 218 | "b/a/ba.txt" 219 | ) 220 | val p1s = globTree.glob("**/*.txt") 221 | verify(p1s, refPaths, globTree) 222 | 223 | val p2s = testDir.glob("globtree/**/*.txt") 224 | verify(p2s, refPaths, globTree) 225 | } 226 | 227 | it should "match deep file-glob (e.g. 'subdir/**.ext')" in { 228 | val refPaths = Seq( 229 | "one.txt", 230 | "two.txt", 231 | "three.txt", 232 | "a/a.txt", 233 | "a/x.txt", 234 | "a/a2/x.txt", 235 | "a/a2/a2.txt", 236 | "b/a/ba.txt", 237 | "b/b.txt", 238 | "c/x.txt", 239 | "c/c.txt" 240 | ) 241 | val p1s = globTree.glob("**.txt") 242 | verify(p1s, refPaths, globTree) 243 | 244 | val p2s = testDir.glob("globtree/**.txt") 245 | verify(p2s, refPaths, globTree) 246 | } 247 | 248 | it should "match everything (e.g. 'subdir/**')" in { 249 | val refPaths = List( 250 | "a", 251 | "a/a.not", 252 | "a/a.txt", 253 | "a/a2", 254 | "a/a2/a2.txt", 255 | "a/a2/x.txt", 256 | "a/x.txt", 257 | "b", 258 | "b/a", 259 | "b/a/ba.txt", 260 | "b/b.txt", 261 | "c", 262 | "c/c.txt", 263 | "c/x.txt", 264 | "empty", 265 | "one.txt", 266 | "readme.md", 267 | "three.txt", 268 | "two.txt", 269 | "link_to_a" 270 | ) 271 | 272 | val paths = testDir.glob("globtree/**") 273 | verify(paths, refPaths, globTree) 274 | } 275 | 276 | it should "work with links (e.g. 
'link_to_a/**.txt')" in { 277 | val refPaths = Seq( 278 | "a/a.txt", 279 | "a/x.txt", 280 | "a/a2/x.txt", 281 | "a/a2/a2.txt" 282 | ) 283 | 284 | // TODO: DOC: File behaviour, links are resolved (abs + normalized path) 285 | 286 | val p1s = globTree.glob("link_to_a/**.txt", visitOptions = File.VisitOptions.follow) 287 | verify(p1s, refPaths, globTree) 288 | 289 | val p2s = globTree.glob("link_to_a/**.txt").toSeq 290 | assert(p2s.isEmpty) 291 | 292 | val p3s = testDir.glob("globtree/link_to_a/**.txt", visitOptions = File.VisitOptions.follow) 293 | verify(p3s, refPaths, globTree) 294 | 295 | val p4s = testDir.glob("globtree/link_to_a/**.txt") 296 | assert(p4s.isEmpty) 297 | } 298 | 299 | it should "not use dir name as wildcard (e.g. dirname is **)" in { 300 | val d = globWildcardPath // "path" / "with" / "**" 301 | val paths = d.glob("*.txt") 302 | 303 | assert(paths.isEmpty) 304 | } 305 | 306 | "Regex" should "match all txt-files under sub-directory (e.g. '.*/.*\\\\.txt')" in { 307 | val refPaths = Seq( 308 | "a/a.txt", 309 | "a/x.txt", 310 | "a/a2/x.txt", 311 | "a/a2/a2.txt", 312 | "c/x.txt", 313 | "c/c.txt", 314 | "b/b.txt", 315 | "b/a/ba.txt" 316 | ) 317 | val paths = globTree.glob(".*" + separator + ".*\\.txt", syntax = File.PathMatcherSyntax.regex) 318 | 319 | verify(paths, refPaths, globTree) 320 | } 321 | 322 | it should "match the same if `Regex` is used" in { 323 | val pattern = (".*" + separator + ".*\\.txt").r 324 | 325 | val pathsGlob = globTree.glob(pattern.regex, syntax = File.PathMatcherSyntax.regex) 326 | val pathsRegex = globTree.globRegex(pattern) 327 | 328 | verify(pathsRegex, pathsGlob.toSeq.map(_.toString), globTree) 329 | 330 | } 331 | 332 | it should "use parent dir for matching (e.g. plain 'subdir/*.ext' instead of '**/subdir/*.ext)" in { 333 | // e.g. check that b nor c are matched, nor b/a 334 | val refPaths = Seq( 335 | "a/a.txt", 336 | "a/x.txt", 337 | "a/a2/a2.txt", 338 | "a/a2/x.txt" 339 | ) 340 | val paths = globTree.glob("a" + separator + ".*\\.txt", syntax = File.PathMatcherSyntax.regex) 341 | 342 | verify(paths, refPaths, globTree) 343 | assert(globTree.glob("a/.*\\.txt", includePath = false, syntax = File.PathMatcherSyntax.regex).isEmpty) 344 | } 345 | 346 | it should "not use dir name as wildcard (e.g. dirname is .*)" in { 347 | val d = regexWildcardPath // "path" / "with" / ".*" 348 | val paths = d.glob("a\\.txt", syntax = File.PathMatcherSyntax.regex) 349 | assert(paths.isEmpty) 350 | } 351 | } 352 | -------------------------------------------------------------------------------- /src/test/scala/better/files/ResourceSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import java.net.{URL, URLClassLoader} 4 | import java.nio.charset.Charset 5 | 6 | import better.files.test_pkg.ResourceSpecHelper 7 | 8 | final class ResourceSpec extends CommonSpec { 9 | implicit val charset: Charset = java.nio.charset.StandardCharsets.US_ASCII 10 | val testFileText = "This is the test-file.txt file." 11 | val altTestFileText = "This is the another-test-file.txt file." 
12 | val testFile = "better/files/test-file.txt" 13 | val testFileRel = "test-file.txt" 14 | val testFileAltRel = "another-test-file.txt" 15 | val testFileFromCL = "files/test-file.txt" 16 | 17 | "Resource" can "look up from the context class loader" in { 18 | assert(Resource.asStream(testFile).get.asString() startsWith testFileText) 19 | } 20 | 21 | it can "look up from a specified class loader" in { 22 | val clURL = new URL(Resource.my.getUrl("ResourceSpec.class"), "../") 23 | assert(clURL.toExternalForm endsWith "/") 24 | assert(Resource.from(new URLClassLoader(Array(clURL))).getAsString(testFileFromCL) startsWith testFileText) 25 | } 26 | 27 | it can "look up from the call site" in { 28 | assert(Resource.my.asStream(testFileRel).get.asString() startsWith testFileText) 29 | // This tests that Resource.my uses the correct call site when called from outside the better.files package. 30 | assert((new ResourceSpecHelper).openTestStream().asString() startsWith altTestFileText) 31 | } 32 | 33 | it can "look up from a statically-known type" in { 34 | assert(Resource.at[ResourceSpec].getAsString(testFileRel) startsWith testFileText) 35 | assert(Resource.at[Resource.type].getAsString(testFileRel) startsWith testFileText) 36 | } 37 | 38 | it can "look up from a java.lang.Class" in { 39 | assert(Resource.at(Class.forName("better.files.File")).getAsString(testFileRel) startsWith testFileText) 40 | } 41 | 42 | it can "look up a file in another package" in { 43 | assert(Resource.at[ResourceSpecHelper].getAsString(testFileAltRel) startsWith altTestFileText) 44 | } 45 | 46 | it should "require a concrete type" in { 47 | """def foo[T] = better.files.Resource.at[T].asStream("foo")""" shouldNot typeCheck 48 | } 49 | 50 | it should "fetch root url" in { 51 | assert(Option(Resource.getUrl()).isDefined) 52 | } 53 | 54 | it should "work with using util" in { 55 | File.usingTemporaryFile() { file => 56 | file.appendText("hello world") 57 | val lines = using(file.newInputStream()) { is => 58 | is.lines().toList 59 | } 60 | assert(lines === "hello world" :: Nil) 61 | } 62 | } 63 | 64 | it should "close multiple resources" in { 65 | def emit(dir: File, partitions: Int, lines: Int) = { 66 | for { 67 | writers <- Vector.tabulate(partitions)(i => (dir / s"partition-$i.csv").newPrintWriter()).autoClosed 68 | line <- 1 to lines 69 | } writers(line % partitions).println(line) 70 | } 71 | 72 | File.usingTemporaryDirectory() { dir => 73 | val lines = 1000 74 | emit(dir = dir, partitions = 5, lines = lines) 75 | val expected = dir.list(filter = _.extension().contains(".csv")).flatMap(_.lines()).map(_.toInt).toSet 76 | assert((1 to lines).forall(expected)) 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/test/scala/better/files/ScannerSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | 3 | import scala.collection.compat._ 4 | 5 | import Dsl._ 6 | 7 | class ScannerSpec extends CommonSpec { 8 | def t1 = File.newTemporaryFile() 9 | 10 | "splitter" should "split" in { 11 | val csvSplitter = StringSplitter.on(',') 12 | def split(s: String) = csvSplitter.split(s).iterator.to(List) 13 | 14 | assert(split(",") === List("", "")) 15 | assert(split("") === List("")) 16 | assert(split("Hello World") === List("Hello World")) 17 | assert(split("Hello,World") === List("Hello", "World")) 18 | 19 | assert(split(",,") === List("", "", "")) 20 | assert(split(",Hello,World,") === List("", "Hello", "World", "")) 
21 | assert(split(",Hello,World") === List("", "Hello", "World")) 22 | assert(split("Hello,World,") === List("Hello", "World", "")) 23 | } 24 | 25 | "scanner" should "parse files" in { 26 | val data = t1 << s""" 27 | | Hello World 28 | | 1 2 3 29 | | Ok 23 football 30 | """.stripMargin 31 | data.scanner() foreach { scanner => 32 | assert(scanner.lineNumber() == 0) 33 | assert(scanner.next[String] == "Hello") 34 | assert(scanner.lineNumber() == 2) 35 | assert(scanner.next[String] == "World") 36 | assert(scanner.next[Int] == 1) 37 | assert(scanner.next[Int] == 2) 38 | assert(scanner.lineNumber() == 3) 39 | assert(scanner.next[Int] == 3) 40 | assert(scanner.nextLine() == " Ok 23 football") 41 | assert(!scanner.hasNext) 42 | a[NoSuchElementException] should be thrownBy scanner.next() 43 | a[NoSuchElementException] should be thrownBy scanner.nextLine() 44 | assert(!scanner.hasNext) 45 | } 46 | data.tokens().toSeq shouldEqual data.newScanner().toSeq 47 | } 48 | 49 | it should "parse longs/booleans" in { 50 | val data = for { 51 | scanner <- Scanner("10 false").autoClosed 52 | } yield scanner.next[(Long, Boolean)] 53 | data.get() shouldBe ((10L, false)) 54 | } 55 | 56 | it should "parse custom parsers" in { 57 | val file = t1 < """ 58 | |Garfield 59 | |Woofer 60 | """.stripMargin 61 | 62 | sealed trait Animal 63 | case class Dog(name: String) extends Animal 64 | case class Cat(name: String) extends Animal 65 | 66 | implicit val animalParser: Scannable[Animal] = Scannable { scanner => 67 | val name = scanner.next[String] 68 | if (name == "Garfield") Cat(name) else Dog(name) 69 | } 70 | file.scanner() foreach { scanner => 71 | Seq.fill(2)(scanner.next[Animal]) should contain theSameElementsInOrderAs Seq(Cat("Garfield"), Dog("Woofer")) 72 | } 73 | } 74 | 75 | it should "parse empty tokens" in { 76 | val scanner = Scanner("hello||world", StringSplitter.on('|')) 77 | List.fill(3)(scanner.next[Option[String]]) shouldEqual List(Some("hello"), None, Some("world")) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/test/scala/better/files/akka/FileWatcher.scala: -------------------------------------------------------------------------------- 1 | package better.files.akka 2 | 3 | import scala.collection.mutable 4 | 5 | import better.files._ 6 | 7 | import _root_.akka.actor._ 8 | 9 | /** An actor that can watch a file or a directory 10 | * Instead of directly calling the constructor of this, call file.newWatcher to create the actor 11 | * 12 | * @param file watch this file (or directory) 13 | * @param maxDepth In case of directories, how much depth should we watch 14 | */ 15 | class FileWatcher(file: File, maxDepth: Int) extends Actor { 16 | import FileWatcher._ 17 | 18 | def this(file: File, recursive: Boolean = true) = this(file, if (recursive) Int.MaxValue else 0) 19 | 20 | protected[this] val callbacks = new MutableMultiMap[Event, Callback] 21 | 22 | protected[this] val monitor: File.Monitor = new FileMonitor(file, maxDepth) { 23 | override def onEvent(event: Event, file: File, count: Int) = self ! Message.NewEvent(event, file, count) 24 | override def onException(exception: Throwable) = self ! 
Status.Failure(exception) 25 | } 26 | 27 | override def preStart() = monitor.start()(executionContext = context.dispatcher) 28 | 29 | override def receive = { 30 | case Message.NewEvent(event, target, count) if callbacks.contains(event) => 31 | callbacks(event).foreach(f => repeat(count)(f(event -> target))) 32 | case Message.RegisterCallback(events, callback) => events.foreach(event => callbacks.addBinding(event, callback)) 33 | case Message.RemoveCallback(event, callback) => callbacks.removeBinding(event, callback) 34 | } 35 | 36 | override def postStop() = monitor.stop() 37 | } 38 | 39 | object FileWatcher { 40 | import java.nio.file.{Path, WatchEvent} 41 | 42 | type Event = WatchEvent.Kind[Path] 43 | type Callback = PartialFunction[(Event, File), Unit] 44 | 45 | sealed trait Message 46 | object Message { 47 | case class NewEvent(event: Event, file: File, count: Int) extends Message 48 | case class RegisterCallback(events: Iterable[Event], callback: Callback) extends Message 49 | case class RemoveCallback(event: Event, callback: Callback) extends Message 50 | } 51 | 52 | implicit val disposeActorSystem: Disposable[ActorSystem] = 53 | Disposable(_.terminate()) 54 | 55 | implicit class FileWatcherOps(file: File) { 56 | def watcherProps(recursive: Boolean): Props = 57 | Props(new FileWatcher(file, recursive)) 58 | 59 | def newWatcher(recursive: Boolean = true)(implicit system: ActorSystem): ActorRef = 60 | system.actorOf(watcherProps(recursive)) 61 | } 62 | 63 | def when(events: Event*)(callback: Callback): Message = 64 | Message.RegisterCallback(events, callback) 65 | 66 | def on(event: Event)(callback: File => Unit): Message = 67 | when(event) { case (`event`, file) => callback(file) } 68 | 69 | def stop(event: Event, callback: Callback): Message = 70 | Message.RemoveCallback(event, callback) 71 | } 72 | 73 | class MutableMultiMap[K, V] { 74 | private[this] val table = mutable.Map.empty[K, mutable.Set[V]] 75 | def contains(k: K): Boolean = table.contains(k) 76 | def apply(k: K): Set[V] = table.getOrElse(k, mutable.Set.empty).toSet 77 | def addBinding(k: K, v: V): Unit = table.getOrElseUpdate(k, mutable.Set.empty) += v 78 | def removeBinding(k: K, v: V): Unit = if (contains(k)) table(k) -= v 79 | } 80 | -------------------------------------------------------------------------------- /src/test/scala/better/files/akka/FileWatcherSpec.scala: -------------------------------------------------------------------------------- 1 | package better.files.akka 2 | 3 | import scala.collection.mutable 4 | import scala.concurrent.duration._ 5 | import scala.language.postfixOps 6 | 7 | import better.files._ 8 | import better.files.Dsl._ 9 | 10 | class FileWatcherSpec extends CommonSpec { 11 | "file watcher" should "watch directories" in { 12 | assume(isLinux) 13 | File.usingTemporaryDirectory() { dir => 14 | (dir / "a" / "b" / "c.txt").createIfNotExists(createParents = true) 15 | 16 | var actualEvents = List.empty[String] 17 | def output(file: File, event: String) = 18 | synchronized { 19 | val msg = s"${dir.path relativize file.path} got $event" 20 | println(msg) 21 | actualEvents = msg :: actualEvents 22 | } 23 | 24 | /*************************************************************************/ 25 | 26 | import java.nio.file.{StandardWatchEventKinds => Events} 27 | import FileWatcher._ 28 | 29 | import _root_.akka.actor._ 30 | implicit val system = ActorSystem() 31 | 32 | val watcher: ActorRef = dir.newWatcher() 33 | 34 | watcher ! 
when(events = Events.ENTRY_CREATE, Events.ENTRY_MODIFY) { // watch for multiple events 35 | case (Events.ENTRY_CREATE, file) => output(file, "created") 36 | case (Events.ENTRY_MODIFY, file) => output(file, "modified") 37 | } 38 | 39 | watcher ! on(Events.ENTRY_DELETE)(file => output(file, "deleted")) // register partial function for single event 40 | 41 | /************************************************************************/ 42 | 43 | sleep(5 seconds) 44 | 45 | val expectedEvents = mutable.ListBuffer.empty[String] 46 | 47 | def doIO[U](logs: String*)(f: => U): Unit = { 48 | expectedEvents ++= logs 49 | f 50 | sleep() 51 | } 52 | 53 | doIO("a/b/c.txt got modified") { 54 | (dir / "a" / "b" / "c.txt").writeText("Hello world") 55 | } 56 | doIO("a/b got deleted", "a/b/c.txt got deleted") { 57 | rm(dir / "a" / "b") 58 | } 59 | doIO("d got created") { 60 | mkdir(dir / "d") 61 | } 62 | doIO("d/e.txt got created") { 63 | touch(dir / "d" / "e.txt") 64 | } 65 | doIO("d/f got created") { 66 | mkdirs(dir / "d" / "f" / "g") 67 | } 68 | doIO("d/f/g/e.txt got created") { 69 | touch(dir / "d" / "f" / "g" / "e.txt") 70 | } 71 | 72 | doIO("a/e.txt got created", "d/f/g/e.txt got deleted") { 73 | (dir / "d" / "f" / "g" / "e.txt") moveTo (dir / "a" / "e.txt") 74 | } 75 | 76 | sleep(10 seconds) 77 | 78 | println(s""" 79 | |Expected=${expectedEvents.sorted} 80 | |Actual=${actualEvents.sorted} 81 | |""".stripMargin) 82 | 83 | expectedEvents.diff(actualEvents) shouldBe empty 84 | 85 | def checkNotWatching[U](msg: String)(f: => U) = { 86 | val before = List(actualEvents: _*) 87 | f 88 | sleep() 89 | val after = List(actualEvents: _*) 90 | assert(before === after, msg) 91 | } 92 | 93 | system.stop(watcher) 94 | sleep() 95 | checkNotWatching("stop watching after actor is stopped") { 96 | mkdirs(dir / "e") 97 | } 98 | 99 | system.terminate() 100 | sleep() 101 | checkNotWatching("stop watching after actor-system is stopped") { 102 | mkdirs(dir / "f") 103 | } 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/test/scala/better/files/akka/README.md: -------------------------------------------------------------------------------- 1 | Reproduction of [this Java Advent article](http://www.javaadvent.com/2015/12/reactive-file-system-monitoring-using-akka-actors.html) 2 | 3 | ----- 4 | 5 | In this article, we will discuss: 6 | 7 | 0. File system monitoring using [Java NIO.2][nio2] 8 | 1. Common pitfalls of the default Java library 9 | 2. Design a simple thread-based file system monitor 10 | 3. Use the above to design a reactive file system monitor using the [actor][akka] [model][actorModel] 11 | 12 | **Note**: Although all the code samples here are in Scala, it can be rewritten in simple Java too. To quickly familiarize yourself with Scala syntax, [here is a very short and nice Scala cheatsheet][cheatsheet]. For a more comprehensive guide to Scala for Java programmers, [consult this][cheatsheet2] (not needed to follow this article). 
13 | 14 | For the absolute shortest cheatsheet, the following Java code: 15 | 16 | ```java 17 | public void foo(int x, int y) { 18 | int z = x + y 19 | if (z == 1) { 20 | System.out.println(x); 21 | } else { 22 | System.out.println(y); 23 | } 24 | } 25 | ``` 26 | 27 | is equivalent to the following Scala code: 28 | 29 | ```scala 30 | def foo(x: Int, y: Int): Unit = { 31 | val z: Int = x + y 32 | z match { 33 | case 1 => println(x) 34 | case _ => println(y) 35 | } 36 | } 37 | ``` 38 | 39 | 40 | All the code presented here is available under MIT license as part of the [better-files][better-files-watcher] library on [GitHub][better-files]. 41 | 42 | ----------- 43 | 44 | Let's say you are tasked to build a cross-platform desktop file-search engine. You quickly realize that after the initial indexing of all the files, you need to also quickly reindex any new files (or directories) that got created or updated. A naive way would be to simply rescan the entire file system every few minutes; but that would be incredibly inefficient since most operating systems expose file system notification APIs that allow the application programmer to register callbacks for changes e.g. [ionotify][ionotify-wiki] in Linux, [FSEvenets][fsevents-wiki] in Mac and [FindFirstChangeNotification][FindFirstChangeNotification] in Windows. 45 | 46 | But now you are stuck dealing with OS-specific APIs! Thankfully, beginning Java SE 7, we have a platform independent abstraction for watching file system changes via the [WatchService API][javadoc-watchservice]. The WatchService API was developed as part of [Java NIO.2][nio2-wiki], under [JSR-51][jsr-51] and here is a "hello world" example of using it to watch a given [Path][javadoc-path]: 47 | 48 | ```scala 49 | import java.nio.file._ 50 | import java.nio.file.StandardWatchEventKinds._ 51 | import scala.collection.JavaConversions._ 52 | 53 | def watch(directory: Path): Unit = { 54 | // First create the service 55 | val service: WatchService = directory.getFileSystem.newWatchService() 56 | 57 | // Register the service to the path and also specify which events we want to be notified about 58 | directory.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) 59 | 60 | while (true) { 61 | val key: WatchKey = service.take() // Wait for this key to be signalled 62 | for {event <- key.pollEvents()} { 63 | // event.context() is the path to the file that got changed 64 | event.kind() match { 65 | case ENTRY_CREATE => println(s"${event.context()} got created") 66 | case ENTRY_MODIFY => println(s"${event.context()} got modified") 67 | case ENTRY_DELETE => println(s"${event.context()} got deleted") 68 | case _ => 69 | // This can happen when OS discards or loses an event. 70 | // See: http://docs.oracle.com/javase/8/docs/api/java/nio/file/StandardWatchEventKinds.html#OVERFLOW 71 | println(s"Unknown event $event happened at ${event.context()}") 72 | } 73 | } 74 | key.reset() // Do not forget to do this!! See: http://stackoverflow.com/questions/20180547/ 75 | } 76 | } 77 | ``` 78 | 79 | Although the above is a good first attempt, it lacks in several aspects: 80 | 81 | 0. **Bad Design**: The above code looks unnatural and you probably had to [look it up on StackOverflow][so-down] to get it right. Can we do better? 82 | 2. **Bad Design**: The code does not do a very good job of handling errors. What happens when we encounter a file we could not open? 83 | 3. 
**Gotcha**: The Java API only allows us to watch the directory for changes to its direct children; it [does not recursively watch a directory][so-recursive-watching] for you. 84 | 4. **Gotcha**: The Java API [does not allow us to watch a single file][so-only-watch-dirs], only a directory. 85 | 5. **Gotcha**: Even if we resolve the aformentioned issues, the Java API [does not automatically start watching a new child file][so-autowatch] or directory created under the root. 86 | 6. **Bad Design**: The code as implemented above, exposes a blocking/polling, thread-based model. Can we use a better concurrency abstraction? 87 | 88 | ----------- 89 | 90 | 91 | Let's start with each of the above concerns. 92 | 93 | * **A better interface**: Here is what *my ideal* interface would look like: 94 | 95 | ```scala 96 | abstract class FileMonitor(root: Path) { 97 | def start(): Unit 98 | def onCreate(path: Path): Unit 99 | def onModify(path: Path): Unit 100 | def onDelete(path: Path): Unit 101 | def stop(): Unit 102 | } 103 | ``` 104 | 105 | That way, I can simply write the example code as: 106 | 107 | ```scala 108 | val watcher = new FileMonitor(myFile) { 109 | override def onCreate(path: Path) = println(s"$path got created") 110 | override def onModify(path: Path) = println(s"$path got modified") 111 | override def onDelete(path: Path) = println(s"$path got deleted") 112 | } 113 | watcher.start() 114 | ``` 115 | 116 | Ok, let's try to adapt the first example using a Java `Thread` so that we can expose "my ideal interface": 117 | 118 | ```scala 119 | trait FileMonitor { // My ideal interface 120 | val root: Path // starting file 121 | def start(): Unit // start the monitor 122 | def onCreate(path: Path) = {} // on-create callback 123 | def onModify(path: Path) = {} // on-modify callback 124 | def onDelete(path: Path) = {} // on-delete callback 125 | def onUnknownEvent(event: WatchEvent[_]) = {} // handle lost/discarded events 126 | def onException(e: Throwable) = {} // handle errors e.g. a read error 127 | def stop(): Unit // stop the monitor 128 | } 129 | ``` 130 | 131 | And here is a very basic thread-based implementation: 132 | 133 | ```scala 134 | class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor { 135 | setDaemon(true) // daemonize this thread 136 | setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler { 137 | override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception) 138 | }) 139 | 140 | val service = root.getFileSystem.newWatchService() 141 | 142 | override def run() = Iterator.continually(service.take()).foreach(process) 143 | 144 | override def interrupt() = { 145 | service.close() 146 | super.interrupt() 147 | } 148 | 149 | override def start() = { 150 | watch(root) 151 | super.start() 152 | } 153 | 154 | protected[this] def watch(file: Path): Unit = { 155 | file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) 156 | } 157 | 158 | protected[this] def process(key: WatchKey) = { 159 | key.pollEvents() foreach { 160 | case event: WatchEvent[Path] => dispatch(event.kind(), event.context()) 161 | case event => onUnknownEvent(event) 162 | } 163 | key.reset() 164 | } 165 | 166 | def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = { 167 | eventType match { 168 | case ENTRY_CREATE => onCreate(file) 169 | case ENTRY_MODIFY => onModify(file) 170 | case ENTRY_DELETE => onDelete(file) 171 | } 172 | } 173 | } 174 | ``` 175 | 176 | The above looks much cleaner! 
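For instance, here is a minimal usage sketch of the `ThreadFileMonitor` defined above (the directory path and the surrounding `App` object are hypothetical; it assumes the `FileMonitor` trait and the class just shown are in scope):

```scala
import java.nio.file.{Path, Paths}

object FileMonitorDemo extends App {
  // Instantiate the monitor and override only the callbacks we care about
  val monitor = new ThreadFileMonitor(Paths.get("/tmp/watched-dir")) {
    override def onCreate(path: Path) = println(s"$path got created")
    override def onModify(path: Path) = println(s"$path got modified")
    override def onDelete(path: Path) = println(s"$path got deleted")
    override def onException(e: Throwable) = e.printStackTrace()
  }

  monitor.start()     // registers the WatchService and starts the daemon thread
  Thread.sleep(60000) // keep the JVM alive while events stream in
  monitor.interrupt() // closes the WatchService and stops the thread
}
```

Since the monitor runs on a daemon thread, it is the foreground work (here just a sleep) that keeps the process alive.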
Now we can watch files to our heart's content without poring over the details of JavaDocs by simply implementing `onCreate(path)`, `onModify(path)`, `onDelete(path)`, etc. 177 | 178 | * **Exception handling**: This is already done above. `onException` gets called whenever we encounter an exception, and the invoker can decide what to do next by implementing it. 179 | 180 | * **Recursive watching**: The Java API **does not allow recursive watching of directories**. We need to modify `watch(file)` to recursively attach the watcher: 181 | 182 | ```scala 183 | def watch(file: Path, recursive: Boolean = true): Unit = { 184 | if (Files.isDirectory(file)) { 185 | file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) 186 | // recursively call watch on children of this file 187 | if (recursive) { 188 | Files.list(file).iterator() foreach {f => watch(f, recursive)} 189 | } 190 | } 191 | } 192 | ``` 193 | 194 | * **Watching regular files**: As mentioned before, the Java API **can only watch directories**. One workaround for watching a single file is to set a watcher on its parent directory and react only if the event is triggered on the file itself. 195 | 196 | ```scala 197 | override def start() = { 198 | if (Files.isDirectory(root)) { 199 | watch(root, recursive = true) 200 | } else { 201 | watch(root.getParent, recursive = false) 202 | } 203 | super.start() 204 | } 205 | ``` 206 | 207 | And now, in `process(key)`, we make sure we react either to a directory or to that file only: 208 | 209 | ```scala 210 | def reactTo(target: Path) = Files.isDirectory(root) || (root == target) 211 | ``` 212 | 213 | And we check before `dispatch` now: 214 | 215 | ```scala 216 | case event: WatchEvent[Path] => 217 | val target = event.context() 218 | if (reactTo(target)) { 219 | dispatch(event.kind(), target) 220 | } 221 | ``` 222 | 223 | * **Auto-watching new items**: The Java API **does not auto-watch any new sub-files**.
We can address this by attaching the watcher ourselves in `process(key)` when an `ENTRY_CREATE` event is fired: 224 | 225 | ```scala 226 | if (reactTo(target)) { 227 | if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) { 228 | watch(root.resolve(target)) 229 | } 230 | dispatch(event.kind(), target) 231 | } 232 | ``` 233 | 234 | Putting it all together, we have our final [`FileMonitor.scala`][FileMonitor.scala]: 235 | 236 | ```scala 237 | class ThreadFileMonitor(val root: Path) extends Thread with FileMonitor { 238 | setDaemon(true) // daemonize this thread 239 | setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler { 240 | override def uncaughtException(thread: Thread, exception: Throwable) = onException(exception) 241 | }) 242 | 243 | val service = root.getFileSystem.newWatchService() 244 | 245 | override def run() = Iterator.continually(service.take()).foreach(process) 246 | 247 | override def interrupt() = { 248 | service.close() 249 | super.interrupt() 250 | } 251 | 252 | override def start() = { 253 | if (Files.isDirectory(root)) { 254 | watch(root, recursive = true) 255 | } else { 256 | watch(root.getParent, recursive = false) 257 | } 258 | super.start() 259 | } 260 | 261 | protected[this] def watch(file: Path, recursive: Boolean = true): Unit = { 262 | if (Files.isDirectory(file)) { 263 | file.register(service, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY) 264 | if (recursive) { 265 | Files.list(file).iterator() foreach {f => watch(f, recursive)} 266 | } 267 | } 268 | } 269 | 270 | private[this] def reactTo(target: Path) = Files.isDirectory(root) || (root == target) 271 | 272 | protected[this] def process(key: WatchKey) = { 273 | key.pollEvents() foreach { 274 | case event: WatchEvent[Path] => 275 | val target = event.context() 276 | if (reactTo(target)) { 277 | if (Files.isDirectory(root) && event.kind() == ENTRY_CREATE) { 278 | watch(root.resolve(target)) 279 | } 280 | dispatch(event.kind(), target) 281 | } 282 | case event => onUnknownEvent(event) 283 | } 284 | key.reset() 285 | } 286 | 287 | def dispatch(eventType: WatchEvent.Kind[Path], file: Path): Unit = { 288 | eventType match { 289 | case ENTRY_CREATE => onCreate(file) 290 | case ENTRY_MODIFY => onModify(file) 291 | case ENTRY_DELETE => onDelete(file) 292 | } 293 | } 294 | } 295 | ``` 296 | 297 | ----- 298 | Now, that we have addressed all the gotchas and distanced ourselves from the intricacies of the WatchService API, we are still tightly coupled to the thread-based API. 299 | We will use the above class to expose a different concurrency model, namely, the [actor model][actorModel2] instead to design a reactive, dynamic and resilient file-system watcher using [Akka][akka-docs]. Although the [construction of Akka actors][akka-actors] is beyond the scope of this article, we will present a very simple actor that uses the `ThreadFileMonitor`: 300 | 301 | ```scala 302 | import java.nio.file.{Path, WatchEvent} 303 | 304 | import akka.actor._ 305 | 306 | class FileWatcher(file: Path) extends ThreadFileMonitor(file) with Actor { 307 | import FileWatcher._ 308 | 309 | // MultiMap from Events to registered callbacks 310 | protected[this] val callbacks = newMultiMap[Event, Callback] 311 | 312 | // Override the dispatcher from ThreadFileMonitor to inform the actor of a new event 313 | override def dispatch(event: Event, file: Path) = self ! Message.NewEvent(event, file) 314 | 315 | // Override the onException from the ThreadFileMonitor 316 | override def onException(exception: Throwable) = self ! 
Status.Failure(exception) 317 | 318 | // when actor starts, start the ThreadFileMonitor 319 | override def preStart() = super.start() 320 | 321 | // before actor stops, stop the ThreadFileMonitor 322 | override def postStop() = super.interrupt() 323 | 324 | override def receive = { 325 | case Message.NewEvent(event, target) if callbacks contains event => 326 | callbacks(event) foreach {f => f(event -> target)} 327 | 328 | case Message.RegisterCallback(events, callback) => 329 | events foreach {event => callbacks.addBinding(event, callback)} 330 | 331 | case Message.RemoveCallback(event, callback) => 332 | callbacks.removeBinding(event, callback) 333 | } 334 | } 335 | 336 | object FileWatcher { 337 | type Event = WatchEvent.Kind[Path] 338 | type Callback = PartialFunction[(Event, Path), Unit] 339 | 340 | sealed trait Message 341 | object Message { 342 | case class NewEvent(event: Event, file: Path) extends Message 343 | case class RegisterCallback(events: Seq[Event], callback: Callback) extends Message 344 | case class RemoveCallback(event: Event, callback: Callback) extends Message 345 | } 346 | } 347 | ``` 348 | 349 | This allows us to dynamically register and remove callbacks to react to file system events: 350 | 351 | ```scala 352 | // initialize the actor instance 353 | val system = ActorSystem("mySystem") 354 | val watcher: ActorRef = system.actorOf(Props(new FileWatcher(Paths.get("/home/pathikrit")))) 355 | 356 | // util to create a RegisterCallback message for the actor 357 | def when(events: Event*)(callback: Callback): Message = { 358 | Message.RegisterCallback(events.distinct, callback) 359 | } 360 | 361 | // send the register callback message for create/modify events 362 | watcher ! when(events = ENTRY_CREATE, ENTRY_MODIFY) { 363 | case (ENTRY_CREATE, file) => println(s"$file got created") 364 | case (ENTRY_MODIFY, file) => println(s"$file got modified") 365 | } 366 | ``` 367 | 368 | Full source: [`FileWatcher.scala`][FileWatcher.scala] 369 | 370 | ----- 371 | 372 | [actorModel]: https://en.wikipedia.org/wiki/Actor_model 373 | [actorModel2]: http://berb.github.io/diploma-thesis/original/054_actors.html 374 | [akka]: http://akka.io 375 | [akka-actors]: http://doc.akka.io/docs/akka/snapshot/scala/actors.html 376 | [akka-docs]: http://doc.akka.io/docs/akka/2.4.1/java.html 377 | [better-files]: https://github.com/pathikrit/better-files 378 | [better-files-watcher]: https://github.com/pathikrit/better-files#akka-file-watcher 379 | [cheatsheet]: http://learnxinyminutes.com/docs/scala/ 380 | [cheatsheet2]: http://techblog.realestate.com.au/java-to-scala-cheatsheet/ 381 | [FileWatcher.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/akka/src/main/scala/better/files/FileWatcher.scala 382 | [FileMonitor.scala]: https://github.com/pathikrit/better-files/blob/2ea6bb694551f1fe6e9ce58dbd1b814391a02e5a/core/src/main/scala/better/files/FileMonitor.scala 383 | [FindFirstChangeNotification]: https://msdn.microsoft.com/en-us/library/aa364417(VS.85).aspx 384 | [fsevents-wiki]: https://en.wikipedia.org/wiki/FSEvents 385 | [ionotify-wiki]: https://en.wikipedia.org/wiki/Inotify 386 | [nio2]: https://docs.oracle.com/javase/tutorial/essential/io/fileio.html 387 | [nio2-wiki]: https://en.wikipedia.org/wiki/Non-blocking_I/O_(Java) 388 | [jsr-51]: https://www.jcp.org/en/jsr/detail?id=51 389 | [javadoc-path]: https://docs.oracle.com/javase/8/docs/api/java/nio/file/Path.html 390 | [javadoc-watchservice]: 
https://docs.oracle.com/javase/8/docs/api/java/nio/file/WatchService.html 391 | [so-autowatch]: https://github.com/lloydmeta/schwatcher/issues/44 392 | [so-down]: http://memecrunch.com/meme/YBHZ/stackoverflow-is-down/image.jpg 393 | [so-recursive-watching]: http://stackoverflow.com/questions/18701242/how-to-watch-a-folder-and-subfolders-for-changes 394 | [so-only-watch-dirs]: http://stackoverflow.com/questions/16251273/can-i-watch-for-single-file-change-with-watchservice-not-the-whole-directory 395 | -------------------------------------------------------------------------------- /src/test/scala/better/files/benchmarks/Benchmark.scala: -------------------------------------------------------------------------------- 1 | package better.files.benchmarks 2 | 3 | import org.scalatest.funsuite.AnyFunSuite 4 | 5 | trait Benchmark extends AnyFunSuite { 6 | def profile[A](f: => A): (A, Long) = { 7 | val t = System.nanoTime() 8 | (f, ((System.nanoTime() - t) / 1e6).toLong) 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/test/scala/better/files/benchmarks/EncodingBenchmark.scala: -------------------------------------------------------------------------------- 1 | package better.files.benchmarks 2 | 3 | import java.nio.charset.Charset 4 | 5 | import scala.util.Random 6 | 7 | import better.files._ 8 | 9 | class EncodingBenchmark extends Benchmark { 10 | 11 | def testWrite(file: File, charset: Charset) = 12 | profile { 13 | for { 14 | writer <- file.bufferedWriter(charset) 15 | content <- Iterator.continually(Random.nextString(10000)).take(1000) 16 | } writer.write(content + "\n") 17 | } 18 | 19 | def testRead(file: File, charset: Charset) = 20 | profile { 21 | for { 22 | reader <- file.bufferedReader(charset) 23 | line <- reader.lines().autoClosed 24 | } line 25 | } 26 | 27 | def run(charset: Charset) = { 28 | File.temporaryFile() foreach { file => 29 | val (_, w) = testWrite(file, charset) 30 | info(s"Charset=$charset, write=$w ms") 31 | 32 | val (_, r) = testRead(file, charset) 33 | info(s"Charset=$charset, read=$r ms") 34 | } 35 | } 36 | 37 | test("encoding") { 38 | val utf8 = Charset.forName("UTF-8") 39 | run(charset = utf8) 40 | info("-------------") 41 | run(charset = UnicodeCharset(utf8)) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/test/scala/better/files/benchmarks/ScannerBenchmark.scala: -------------------------------------------------------------------------------- 1 | package better.files.benchmarks 2 | 3 | import java.io.{BufferedReader, StringReader} 4 | 5 | import better.files._ 6 | 7 | class ScannerBenchmark extends Benchmark { 8 | val file = File.newTemporaryFile() 9 | val n = 1000 10 | repeat(n) { 11 | file 12 | .appendLine(-n to n mkString " ") 13 | .appendLine("hello " * n) 14 | .appendLine("world " * n) 15 | } 16 | val scanners: Seq[BufferedReader => AbstractScanner] = Seq( 17 | new JavaScanner(_), 18 | new StringBuilderScanner(_), 19 | new CharBufferScanner(_), 20 | new StreamingScanner(_), 21 | new IterableScanner(_), 22 | new IteratorScanner(_), 23 | new BetterFilesScanner(_), 24 | new ArrayBufferScanner(_), 25 | new FastJavaIOScanner2(_), 26 | new FastJavaIOScanner(_) 27 | ) 28 | 29 | def runTest(scanner: AbstractScanner) = { 30 | val (_, time) = profile(run(scanner)) 31 | scanner.close() 32 | info(f"${scanner.getClass.getSimpleName.padTo(25, ' ')}: $time%4d ms") 33 | } 34 | 35 | def run(scanner: AbstractScanner): Unit = 36 | repeat(n) { 37 | 
assert(scanner.hasNext) 38 | val ints = List.fill(2 * n + 1)(scanner.nextInt()) 39 | val line = "" // scanner.nextLine() 40 | val words = IndexedSeq.fill(2 * n)(scanner.next()) 41 | (line, ints, words) 42 | } 43 | 44 | test("scanner") { 45 | info("Warming up ...") 46 | scanners foreach { scannerBuilder => 47 | val canaryData = 48 | """ 49 | |10 -23 50 | |Hello World 51 | |Hello World 52 | |19 53 | """.stripMargin 54 | val scanner = scannerBuilder(new BufferedReader(new StringReader(canaryData))) 55 | info(s"Testing ${scanner.getClass.getSimpleName} for correctness") 56 | assert(scanner.hasNext) 57 | assert(scanner.nextInt() == 10) 58 | assert(scanner.nextInt() == -23) 59 | assert(scanner.next() == "Hello") 60 | assert(scanner.next() == "World") 61 | val l = scanner.nextLine() 62 | assert(l == "Hello World", l) 63 | assert(scanner.nextInt() == 19) 64 | // assert(!scanner.hasNext) 65 | } 66 | 67 | info("Running benchmark ...") 68 | scanners foreach { scanner => 69 | runTest(scanner(file.newBufferedReader())) 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/test/scala/better/files/benchmarks/Scanners.scala: -------------------------------------------------------------------------------- 1 | package better.files.benchmarks 2 | 3 | import java.io.BufferedReader 4 | 5 | import better.files._ 6 | 7 | /** Base interface to test */ 8 | abstract class AbstractScanner(protected[this] val reader: BufferedReader) { 9 | def hasNext: Boolean 10 | def next(): String 11 | def nextInt() = next().toInt 12 | def nextLine() = reader.readLine() 13 | def close() = reader.close() 14 | } 15 | 16 | /** Based on java.util.Scanner */ 17 | class JavaScanner(reader: BufferedReader) extends AbstractScanner(reader) { 18 | private[this] val scanner = new java.util.Scanner(reader) 19 | override def hasNext = scanner.hasNext 20 | override def next() = scanner.next() 21 | override def nextInt() = scanner.nextInt() 22 | override def nextLine() = { 23 | scanner.nextLine() 24 | scanner.nextLine() 25 | } 26 | override def close() = scanner.close() 27 | } 28 | 29 | /** Based on StringTokenizer + resetting the iterator */ 30 | class IterableScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterable[String] { 31 | override def iterator = 32 | for { 33 | line <- Iterator.continually(reader.readLine()).takeWhile(_ != null) 34 | tokenizer = new java.util.StringTokenizer(line) 35 | _ <- Iterator.continually(tokenizer).takeWhile(_.hasMoreTokens) 36 | } yield tokenizer.nextToken() 37 | 38 | private[this] var current = iterator 39 | override def hasNext = current.hasNext 40 | override def next() = current.next() 41 | override def nextLine() = { 42 | current = iterator 43 | super.nextLine() 44 | } 45 | } 46 | 47 | /** Based on a mutating var StringTokenizer */ 48 | class IteratorScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { 49 | import java.util.StringTokenizer 50 | private[this] val tokenizers = 51 | Iterator.continually(reader.readLine()).takeWhile(_ != null).map(new StringTokenizer(_)).filter(_.hasMoreTokens) 52 | private[this] var current: Option[StringTokenizer] = None 53 | 54 | @inline private[this] def tokenizer(): Option[StringTokenizer] = 55 | current.find(_.hasMoreTokens) orElse { 56 | current = if (tokenizers.hasNext) Some(tokenizers.next()) else None 57 | current 58 | } 59 | override def hasNext = tokenizer().nonEmpty 60 | override def next() = tokenizer().get.nextToken() 61 | override def nextLine() = { 
62 | current = None 63 | super.nextLine() 64 | } 65 | } 66 | 67 | /** Based on java.io.StreamTokenizer */ 68 | class StreamingScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { 69 | import java.io.StreamTokenizer 70 | private[this] val in = new StreamTokenizer(reader) 71 | 72 | override def hasNext = in.ttype != StreamTokenizer.TT_EOF 73 | override def next() = { 74 | in.nextToken() 75 | in.sval 76 | } 77 | override def nextInt() = nextDouble().toInt 78 | def nextDouble() = { 79 | in.nextToken() 80 | in.nval 81 | } 82 | } 83 | 84 | /** Based on a reusable StringBuilder */ 85 | class StringBuilderScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { 86 | private[this] val chars = reader.chars.nonClosing() 87 | private[this] val buffer = new StringBuilder() 88 | 89 | override def next() = { 90 | buffer.clear() 91 | while (buffer.isEmpty && hasNext) { 92 | chars.takeWhile(c => !c.isWhitespace).foreach(buffer += _) 93 | } 94 | buffer.toString() 95 | } 96 | override def hasNext = chars.hasNext 97 | } 98 | 99 | /** Scala version of the ArrayBufferScanner */ 100 | class CharBufferScanner(reader: BufferedReader) extends AbstractScanner(reader) with Iterator[String] { 101 | private[this] val chars = reader.chars.nonClosing() 102 | private[this] var buffer = Array.ofDim[Char](1 << 4) 103 | 104 | override def next() = { 105 | var pos = 0 106 | while (pos == 0 && hasNext) { 107 | for { 108 | c <- chars.takeWhile(c => c != ' ' && c != '\n') 109 | } { 110 | if (pos == buffer.length) buffer = java.util.Arrays.copyOf(buffer, 2 * pos) 111 | buffer(pos) = c 112 | pos += 1 113 | } 114 | } 115 | String.copyValueOf(buffer, 0, pos) 116 | } 117 | override def hasNext = chars.hasNext 118 | } 119 | 120 | /** Scanner using https://github.com/williamfiset/FastJavaIO */ 121 | class FastJavaIOScanner(reader: BufferedReader) extends AbstractScanner(reader) { 122 | protected def is: java.io.InputStream = new org.apache.commons.io.input.ReaderInputStream(reader, DefaultCharset) 123 | 124 | private[this] val fastReader = new fastjavaio.InputReader(is) 125 | 126 | override def hasNext = true // TODO: https://github.com/williamfiset/FastJavaIO/issues/3 127 | override def next() = fastReader.readStr() 128 | override def nextInt() = fastReader.readInt() 129 | override def nextLine() = fastReader.readLine() 130 | } 131 | 132 | /** Same as FastJavaIOScanner but uses better-files's Reader => InputStream */ 133 | class FastJavaIOScanner2(reader: BufferedReader) extends FastJavaIOScanner(reader) { 134 | override def is = reader.toInputStream() 135 | } 136 | 137 | /** Based on the better-files implementation */ 138 | class BetterFilesScanner(reader: BufferedReader) extends AbstractScanner(reader) { 139 | private[this] val scanner = Scanner(reader) 140 | override def hasNext = scanner.hasNext 141 | override def next() = scanner.next() 142 | override def nextLine() = scanner.nextLine() 143 | } 144 | -------------------------------------------------------------------------------- /src/test/scala/better/files/test_pkg/ResourceSpecHelper.scala: -------------------------------------------------------------------------------- 1 | package better.files 2 | package test_pkg 3 | 4 | import java.io.InputStream 5 | 6 | class ResourceSpecHelper { 7 | def openTestStream(): InputStream = Resource.my.getAsStream("another-test-file.txt") 8 | } 9 | --------------------------------------------------------------------------------
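The helper above lives in `better.files.test_pkg` so that `ResourceSpec` can verify that `Resource.my` resolves resources relative to the *calling* class's package rather than the package of `Resource` itself. A minimal sketch of similar lookups outside the test suite (the object name is hypothetical; it assumes better-files and the test resources shown earlier in this repository are on the classpath):

```scala
import java.nio.charset.{Charset, StandardCharsets}

import better.files._

object ResourceLookupSketch extends App {
  implicit val charset: Charset = StandardCharsets.UTF_8

  // Absolute lookup via the context class loader: the full resource path is required.
  val viaClassLoader: Option[String] = Resource.asStream("better/files/test-file.txt").map(_.asString())

  // Lookup relative to a statically-known type: resolved against that type's package.
  val viaType: String = Resource.at[File].getAsString("test-file.txt")

  println(viaClassLoader.getOrElse("<not found>"))
  println(viaType)
}
```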