├── .gitignore ├── .travis.yml ├── LICENSE ├── Main.hs ├── README.md ├── default.nix ├── flake.lock ├── flake.nix └── una.cabal /.gitignore: -------------------------------------------------------------------------------- 1 | /una 2 | /una.hi 3 | /dist/ 4 | /Main.hi 5 | /Main 6 | /Main.eventlog 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: nix 2 | 3 | sudo: true 4 | 5 | git: 6 | depth: 1 7 | 8 | env: 9 | global: 10 | matrix: 11 | - GHCVERSION=ghc822 12 | - GHCVERSION=ghc844 13 | - GHCVERSION=ghc863 14 | 15 | matrix: 16 | allow_failures: 17 | exclude: 18 | 19 | script: 20 | - nix-build --argstr compiler $GHCVERSION 21 | 22 | branches: 23 | only: 24 | - master 25 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2003-2009, John Wiegley. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | - Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | - Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 14 | - Neither the name of New Artisans LLC nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 19 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 20 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 21 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 22 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 23 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 24 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 25 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 26 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /Main.hs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env runhaskell 2 | 3 | {-# LANGUAGE DeriveDataTypeable #-} 4 | {-# OPTIONS_GHC -Wall 5 | -fno-warn-name-shadowing 6 | -fno-warn-missing-signatures 7 | -fno-warn-incomplete-patterns #-} 8 | 9 | module Main where 10 | 11 | -- una.hs, version 2.0 (2010-07-02) 12 | -- 13 | -- by John Wiegley 14 | -- 15 | -- A simple universal unarchiving utility. Just point it at any archive or 16 | -- compressed file, and it spits out a single file or directory in the current 17 | -- directory with its contents. Use -d to delete the original archive on 18 | -- success. Use -f to overwrite any existing file or directory which might be 19 | -- in the way. 
20 | -- 21 | -- To handle all the supported formats on Mac OS X, you must first install: 22 | -- sudo port install cabextract unarj unrar lha p7zip 23 | 24 | import Control.Applicative 25 | import Control.Concurrent 26 | import qualified Control.Exception as C 27 | import Control.Monad 28 | import qualified Data.ByteString.Lazy as B 29 | import Data.Char 30 | import Data.Function (fix) 31 | import Data.List 32 | import Data.Maybe 33 | import Data.Traversable hiding (sequence) 34 | import Prelude hiding (sequence) 35 | import System.Console.CmdArgs 36 | import System.Directory 37 | import System.Environment 38 | import System.Exit 39 | import System.FilePath 40 | import System.IO 41 | import System.IO.Storage 42 | import System.Process 43 | 44 | -- This script takes a series of pathnames to compressed files and/or 45 | -- archives, and uncompresses/decodes/unarchives them. 46 | -- 47 | -- What's especially useful about this script is that it guarantees that the 48 | -- result of this process is a single new entry in the current directory, 49 | -- whether that be a file or a directory. That is: 50 | -- 51 | -- 1. If it's simply a compressed file, it uncompresses in the current 52 | -- directory. 53 | -- 2. If it's an archive containing a single file or directory, the result 54 | -- is the same as if it had been compressed: the file or directory 55 | -- ends up in the current directory. 56 | -- 3. If the archive contains multiple items, they are unarchived in a 57 | -- directory named after the original file. 58 | 59 | version :: String 60 | version = "2.0.1" 61 | 62 | copyright :: String 63 | copyright = "2009-2012" 64 | 65 | unaSummary :: String 66 | unaSummary = "una v" ++ version ++ ", (C) John Wiegley " ++ copyright 67 | 68 | data UnaOpts = UnaOpts 69 | { delete_ :: Bool 70 | , force :: Bool 71 | , temp :: FilePath 72 | , output :: FilePath 73 | , sysTemp :: Bool 74 | , test :: Bool 75 | , files :: [FilePath] 76 | } 77 | deriving (Data, Typeable, Show, Eq) 78 | 79 | unaOpts :: UnaOpts 80 | unaOpts = UnaOpts 81 | { delete_ = def &= help "Delete the original archive if successful" 82 | , force = def &= name "f" &= help "Overwrite any existing file/dir" 83 | , temp = def &= typDir &= 84 | help "Use DIR as a temp directory, instead of current" 85 | , output = def &= typDir &= name "o" &= 86 | help "Unarchive to DIR instead of archive's directory" 87 | , sysTemp = def &= name "T" &= 88 | help "Use the system's temp directory (typically /tmp)" 89 | , test = def &= explicit &= name "test" &= 90 | help "Extract, throw away resulting file(s), set error code" 91 | , files = def &= args &= typ "FILE..." 92 | } &= 93 | summary unaSummary &= 94 | program "una" &= 95 | help "Universal recursive unarchiver/decoder/decompressor tool" 96 | 97 | 98 | main :: IO () 99 | main = do 100 | mainArgs <- getArgs 101 | opts <- withArgs (if null mainArgs then ["--help"] else mainArgs) 102 | (cmdArgs unaOpts) 103 | 104 | --when (null (files opts)) $ cmdArgsApply cmdArgsHelp 105 | 106 | -- Extract each archive given on the command-line. If it's not recognizable 107 | -- as an archive, the resulting pathname will be identical to the input, in 108 | -- which case nothing has been done. If an error occurs for a given 109 | -- archive, stop then. 
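-- For illustration, a hypothetical session (assuming gzip and tar are
-- installed, and that project.tar.gz unpacks to several top-level
-- entries):
--
--   $ una project.tar.gz
--   Extracted directory: project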
110 | forM_ (files opts) $ \path -> do 111 | result <- withStore "main" $ do putValue "main" "opts" opts 112 | extract path (force opts) 113 | if test opts 114 | then case result of 115 | ArchiveError _ -> exitWith $ ExitFailure 1 116 | FileName fp -> if fp /= path 117 | then removeFilePath fp 118 | else exitWith $ ExitFailure 1 119 | DirectoryName dp -> removeFilePath dp 120 | DataStream _ -> error "Unexpected DataStream" 121 | 122 | else case result of 123 | ArchiveError err -> error err 124 | FileName fp -> if fp /= path 125 | then success path fp "file" (delete_ opts) 126 | else putStrLn $ "Archive unrecognized: " ++ fp 127 | DirectoryName dp -> success path dp "directory" (delete_ opts) 128 | DataStream _ -> error "Unexpected DataStream" 129 | 130 | -- In case of success, print the final product's path and delete the 131 | -- original archive if -d was used. 132 | where success path f kind del = do 133 | rf <- makeRelativeToCurrentDirectory f 134 | putStrLn $ "Extracted " ++ kind ++ ": " ++ rf 135 | when del $ removeFile path 136 | 137 | 138 | getOption :: Data a => (a -> b) -> IO b 139 | getOption option = do 140 | opts <- getValue "main" "opts" 141 | return $ fromJust $ option <$> opts 142 | 143 | -- Determine which "type" an archive is by examining its extension. It may 144 | -- not be an archive at all, but just a compressed file. It could even be a 145 | -- compressed archive containing just a single file! There are too many 146 | -- possible combinations to know at this point. 147 | 148 | extractors :: [(String, [Extractor])] 149 | extractors = 150 | [ (".tar", [tarballExtractor]) 151 | , (".taz", [compressExtractor, tarballExtractor]) 152 | , (".tgz", [gzipExtractor, tarballExtractor]) 153 | , (".tbz", [bzip2Extractor, tarballExtractor]) 154 | , (".tz2", [bzip2Extractor, tarballExtractor]) 155 | , (".txz", [xzipExtractor, tarballExtractor]) 156 | , (".shar", [sharExtractor]) 157 | , (".z", [compressExtractor]) 158 | , (".gz", [gzipExtractor]) 159 | , (".bz2", [bzip2Extractor]) 160 | , (".xz", [xzipExtractor]) 161 | , (".7z", [p7zipExtractor]) 162 | , (".zip", [zipExtractor]) 163 | , (".jar", [zipExtractor]) 164 | , (".arj", [arjExtractor]) 165 | , (".lha", [lhaExtractor]) 166 | , (".lzh", [lhaExtractor]) 167 | , (".rar", [rarExtractor]) 168 | , (".uu", [uuExtractor]) 169 | , (".a", [arExtractor]) 170 | , (".cab", [cabExtractor]) 171 | , (".cpio", [cpioExtractor]) 172 | 173 | , (".gpg", [gpgExtractor]) 174 | , (".asc", [gpgExtractor]) 175 | 176 | , (".dmg", [diskImageExtractor]) 177 | , (".iso", [diskImageExtractor]) 178 | , (".cdr", [diskImageExtractor]) 179 | , (".sparseimage", [diskImageExtractor]) 180 | , (".sparsebundle", [diskImageExtractor]) 181 | 182 | , (".sit", [stuffItExtractor True]) 183 | , (".sea", [stuffItExtractor True]) 184 | , (".bin", [stuffItExtractor False]) 185 | , (".hqx", [stuffItExtractor False]) 186 | 187 | , (".sdk", [shrinkItExtractor]) 188 | , (".shk", [shrinkItExtractor]) 189 | , (".bxy", [shrinkItExtractor]) 190 | , (".bny", [shrinkItExtractor]) 191 | , (".bqy", [shrinkItExtractor]) 192 | ] 193 | 194 | -- Gzip and family are very simple compressors that can handle streaming data 195 | -- quite easily. 
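-- An aside on how the table above is consumed: extensions are peeled off
-- the end of the filename until one is unrecognized (the real logic is
-- findExtractors, near the end of this file). Below is a self-contained
-- sketch of that idea; chainFor is a hypothetical name, not part of una,
-- and it maps extensions to command names rather than to Extractors:

chainFor :: FilePath -> (FilePath, [String])
chainFor = go []
  where
    go acc f = case lookup (map toLower (takeExtension f)) table of
                 Just cmds -> go (acc ++ cmds) (dropExtension f)
                 Nothing   -> (f, acc)
    table = [(".gz", ["gzip"]), (".tar", ["tar"])]

-- For example: chainFor "project.tar.gz" == ("project", ["gzip","tar"]),
-- i.e. gunzip first, then untar, which is the order extract' runs them in.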
196 | 197 | returnStream :: B.ByteString -> IO ExtractionResult 198 | returnStream out = return (DataStream out, noCleanup) 199 | 200 | simpleExtractor :: String -> [String] -> Extraction -> IO ExtractionResult 201 | simpleExtractor cmd args item = 202 | case item of 203 | DataStream d -> performExtract cmd args d returnStream 204 | FileName f -> performExtract cmd (args ++ [f]) B.empty returnStream 205 | 206 | bestExe :: [String] -> IO String 207 | bestExe xs = fromJust . msum <$> traverse findExecutable xs 208 | 209 | gzipExtractor = Extractor False $ \x -> do 210 | exePath <- bestExe ["pigz", "gzip"] 211 | simpleExtractor exePath ["-qdc"] x 212 | compressExtractor = gzipExtractor 213 | bzip2Extractor = Extractor False $ \x -> do 214 | exePath <- bestExe ["pbzip2", "bzip2"] 215 | simpleExtractor exePath ["-qdc"] x 216 | xzipExtractor = Extractor False $ \x -> do 217 | exePath <- bestExe ["pxz", "xz"] 218 | simpleExtractor exePath ["-qdc"] x 219 | 220 | uuExtractor = Extractor False $ simpleExtractor "uudecode" [] 221 | gpgExtractor = Extractor False $ simpleExtractor "gpg" ["-d"] 222 | 223 | -- Tarballs and 7-zip are both archive formats that can accept their input on 224 | -- stdin. It's not likely that someone will compress a 7zip archive, but it's 225 | -- handled anyway. 226 | 227 | returnContents :: FilePath -> B.ByteString -> IO ExtractionResult 228 | returnContents dir _ = examineContents dir True 229 | 230 | tarballExtractor = Extractor True $ \item -> do 231 | dir <- createTempDirectory 232 | case item of 233 | DataStream d -> 234 | performExtract "tar" ["xCf", dir, "-"] d (returnContents dir) 235 | FileName f -> 236 | performExtract "tar" ["xCf", dir, f] B.empty (returnContents dir) 237 | 238 | sharExtractor = Extractor True $ \item -> do 239 | dir <- createTempDirectory 240 | case item of 241 | DataStream d -> 242 | performExtract "unshar" ["-d", dir] d (returnContents dir) 243 | FileName f -> 244 | performExtract "unshar" ["-d", dir, f] B.empty (returnContents dir) 245 | 246 | p7zipExtractor = Extractor True $ \item -> do 247 | dir <- createTempDirectory 248 | case item of 249 | DataStream d -> 250 | performExtract "7za" ["x", "-bd", "-o" ++ dir, "-si"] d 251 | (returnContents dir) 252 | FileName f -> 253 | performExtract "7za" ["x", f, "-bd", "-o" ++ dir] B.empty 254 | (returnContents dir) 255 | 256 | -- Zip and CAB are not quite as flexible as tar and 7-zip, in that they cannot 257 | -- accept the archive via stdin. If there is a stream from an earlier 258 | -- decoding, it must be written to a temp file and then extracted from that. 259 | 260 | zipExtractor = Extractor True $ fix $ \fn item -> 261 | case item of 262 | DataStream d -> extractByTemp d ".zip" fn 263 | FileName f -> do 264 | dir <- createTempDirectory 265 | performExtract "unzip" ["-q", "-d", dir, f] B.empty (returnContents dir) 266 | 267 | cabExtractor = Extractor True $ fix $ \fn item -> 268 | case item of 269 | DataStream d -> extractByTemp d ".cab" fn 270 | FileName f -> do 271 | dir <- createTempDirectory 272 | performExtract "cabextract" ["-q", "-d", dir, f] B.empty 273 | (returnContents dir) 274 | 275 | -- cpio doesn't know how to extract its contents to a particular directory, so 276 | -- a temporary must be created. It can, however, read input from stream. 
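-- The cpio extractor below must run its command from inside a temporary
-- directory and then restore the previous working directory. As a minimal
-- sketch of that save/switch/restore pattern in isolation (withDirectory
-- is a hypothetical helper, not something una itself uses):

withDirectory :: FilePath -> IO a -> IO a
withDirectory dir action =
  C.bracket getCurrentDirectory   -- acquire: remember where we started
            setCurrentDirectory   -- release: always switch back, even on error
            (\_ -> setCurrentDirectory dir >> action)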
277 | 278 | extractInTempDir :: String -> [String] -> B.ByteString -> IO ExtractionResult 279 | extractInTempDir cmd args inp = do 280 | dir <- createTempDirectory 281 | cdir <- canonicalizePath dir 282 | cwd <- getCurrentDirectory 283 | ccwd <- canonicalizePath cwd 284 | 285 | C.bracket (setCurrentDirectory cdir) 286 | (\_ -> setCurrentDirectory ccwd) 287 | (\_ -> performExtract cmd args inp (returnContents cdir)) 288 | 289 | cpioExtractor = Extractor True $ \item -> 290 | case item of 291 | DataStream d -> extractInTempDir "cpio" ["-id"] d 292 | FileName f -> extractInTempDir "cpio" ["-idF", f] B.empty 293 | 294 | -- The next set of formats can't handle streaming input, nor can they extract 295 | -- to a specified directory. This is why I call them the "dumb" extractors. 296 | -- Everything must be set up for them in advance. 297 | 298 | dumbExtractor :: String -> String -> String -> Extraction -> IO ExtractionResult 299 | dumbExtractor cmd arg ext item = 300 | case item of 301 | DataStream d -> extractByTemp d ext (dumbExtractor cmd arg ext) 302 | FileName f -> extractInTempDir cmd [arg, f] B.empty 303 | 304 | arjExtractor = Extractor True $ dumbExtractor "unarj" "x" ".arj" 305 | lhaExtractor = Extractor True $ dumbExtractor "lha" "x" ".lha" 306 | rarExtractor = Extractor True $ dumbExtractor "unrar" "x" ".rar" 307 | arExtractor = Extractor True $ dumbExtractor "ar" "x" ".ar" 308 | shrinkItExtractor = Extractor True $ dumbExtractor "nulib2" "-x" ".shk" 309 | 310 | -- Disk images are mountable archives, which means the data must be copied out 311 | -- in order to "extract" it. jww (2010-07-09): We should handle Linux 312 | -- loopback mounts too. 313 | 314 | diskImageExtractor = Extractor True $ fix $ \fn item -> 315 | case item of 316 | DataStream d -> extractByTemp d ".dmg" fn 317 | FileName f -> do 318 | let args = ["attach", "-readonly", "-mountrandom", "/tmp", 319 | "-noverify", "-noautofsck", f] 320 | 321 | loud <- isLoud 322 | when loud $ putStrLn $ "! hdiutil " ++ unwords args 323 | 324 | (exit, out, _) <- readProcessWithExitCode "hdiutil" args [] 325 | case exit of 326 | ExitFailure _ -> 327 | return (ArchiveError "Failed to attach disk image", noCleanup) 328 | 329 | ExitSuccess -> do 330 | let mountPoint = 331 | case find (isInfixOf "/tmp/dmg") (lines out) of 332 | Just line -> (Just . last . words) line 333 | Nothing -> Nothing 334 | case mountPoint of 335 | Nothing -> return (ArchiveError "Failed to attach disk image", 336 | noCleanup) 337 | Just dir -> do 338 | tmpDir <- createTempDirectory 339 | 340 | when loud $ 341 | putStrLn $ "! ditto " ++ unwords [dir, tmpDir] 342 | code <- readProcessWithExitCode "ditto" [dir, tmpDir] [] 343 | _ <- case code of { (ExitFailure _, _, _) -> error "ditto: failed"; _ -> return () } 344 | 345 | when loud $ 346 | putStrLn $ "! hdiutil " ++ unwords ["detach", dir, "-force"] 347 | code <- readProcessWithExitCode "hdiutil" 348 | ["detach", dir, "-force"] [] 349 | _ <- case code of { (ExitFailure _, _, _) -> error "hdiutil: failed"; _ -> return () } 350 | 351 | examineContents tmpDir True 352 | 353 | -- StuffIt Expander is its own creature. We talk to it via AppleScript, as I 354 | -- know of no better way. 
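-- For reference, the script assembled below renders roughly as follows,
-- with illustrative paths standing in for the archive and the temporary
-- directory:
--
--   tell application "StuffIt Expander"
--     run
--     expand {POSIX file "/tmp/example.sit"} to POSIX file "/tmp/dir123"
--   end tell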
355 | 356 | stuffItExtractor archivep = Extractor archivep $ fix $ \fn item -> 357 | case item of 358 | DataStream d -> extractByTemp d ".sit" fn 359 | FileName f -> do 360 | tmpDir <- createTempDirectory 361 | canonTmp <- canonicalizePath tmpDir 362 | 363 | let script = " tell application \"StuffIt Expander\"\n" 364 | ++ " run\n" 365 | ++ " expand {POSIX file \"" ++ f ++ "\"}" 366 | -- ++ " with delete originals" 367 | ++ " to POSIX file \"" ++ canonTmp ++ "\"\n" 368 | ++ " end tell" 369 | 370 | loud <- isLoud 371 | when loud $ putStrLn "! invoke StuffIt Expander" 372 | 373 | (exit, _, err) <- readProcessWithExitCode "osascript" [] script 374 | case exit of 375 | ExitSuccess -> 376 | if archivep 377 | then examineContents canonTmp True 378 | else do contents <- getDirectoryContents canonTmp 379 | let elems = delete "." $ delete ".." contents 380 | file = C.assert (length elems == 1) 381 | (canonTmp </> head elems) 382 | exists <- doesFileExist file 383 | return (FileName $ C.assert exists file, 384 | removeDirectoryRecursive canonTmp) 385 | ExitFailure _ -> return (ArchiveError $ 386 | "Failed to invoke StuffIt Expander: " ++ err, 387 | removeDirectoryRecursive canonTmp) 388 | 389 | -- Types used by this script. 390 | 391 | data Extraction = DataStream B.ByteString 392 | | FileName FilePath 393 | | DirectoryName FilePath 394 | | ArchiveError String 395 | deriving Show 396 | 397 | type ExtractionResult = (Extraction, IO ()) 398 | 399 | data Extractor = Extractor { isArchive :: Bool 400 | , extractor :: Extraction -> IO ExtractionResult } 401 | 402 | noCleanup :: IO () 403 | noCleanup = return () 404 | 405 | -- Given a file path, determine its type and extract its contents. Depending 406 | -- on the type of the file, and what command-line options the extraction 407 | -- command supports, the result may be one of several types, described by the 408 | -- type Extraction. 409 | 410 | extract :: FilePath -> Bool -> IO Extraction 411 | extract rpath overwrite = do 412 | path <- canonicalizePath rpath 413 | pexists <- doesFileExist path 414 | 415 | unless pexists $ error $ "File does not exist: " ++ path 416 | 417 | destination <- getDestination 418 | fexists <- doesFileExist destination 419 | dexists <- doesDirectoryExist destination 420 | 421 | when (fexists || dexists) $ 422 | if overwrite 423 | then if fexists 424 | then removeFile destination 425 | else removeDirectoryRecursive destination 426 | else unless (null typs) $ 427 | error $ "Destination already exists: " ++ destination 428 | 429 | -- Recursively perform all the extractions determined by typs, starting with 430 | -- the input Extraction, which simply identifies the data source path. This 431 | -- same path was used to determine typs, rather than sniffing the data for 432 | -- identifying marks (jww (2012-09-06): sniff data to allow complex stream 433 | -- extractions in future). 
434 | extract' typs (FileName path, return ()) 435 | 436 | where 437 | (basename, typs) = findExtractors [] rpath 438 | 439 | getDestination = do 440 | destpath <- getOption output 441 | cwd <- getCurrentDirectory 442 | return $ (if null destpath 443 | then cwd 444 | else destpath) </> takeFileName basename 445 | 446 | wrap :: IO () -> IO Extraction -> IO Extraction 447 | wrap = C.bracket_ (return ()) 448 | 449 | extract' :: [Extractor] -> (Extraction, IO ()) -> IO Extraction 450 | 451 | -- The variations of extract' receive a list of archive types yet to be 452 | -- "unwrapped" from the previous extraction, plus a cleanup action which 453 | -- must be executed before the final result is returned. The end result 454 | -- is that only the final extraction remains, indicated by the return 455 | -- value, with all temporaries having been properly cleaned up. 456 | 457 | extract' _ (x@(ArchiveError _),m) = wrap m $ return x 458 | 459 | extract' [] (DataStream d,m) = wrap m $ do 460 | -- If we reach the final step of the unarchiving process, and the result 461 | -- is a data stream, write it to disk at the desired basename. 462 | destination <- getDestination 463 | B.writeFile destination d 464 | return $ FileName destination 465 | 466 | extract' [] (FileName f,m) = wrap m $ do 467 | destination <- getDestination 468 | -- Don't rename the file 469 | let realdest = dropFileName destination </> takeFileName f 470 | renameFile f realdest 471 | return $ FileName realdest 472 | 473 | extract' [] (DirectoryName f,m) = wrap m $ do 474 | destination <- getDestination 475 | renameDirectory f destination 476 | return $ DirectoryName destination 477 | 478 | extract' (t:ts) (x,m) = wrap m $ do 479 | -- Each extractor returns an ExtractionResult, which identifies the form 480 | -- of the extraction (is the result in a file, on a data stream, etc.), 481 | -- and a cleanup action in IO, which should be performed only after any 482 | -- sub-extractions have taken place. 483 | y <- extractor t x 484 | 485 | -- If t is an archive extractor, we'll let examineContents decide if it 486 | -- contains an item needing further extraction. Otherwise, if there are 487 | -- successive compression or encoding stages, process them recursively. 
488 | extract' (if isArchive t then [] else ts) y 489 | 490 | findExtractors :: [Extractor] -> FilePath -> (FilePath, [Extractor]) 491 | findExtractors acc f = 492 | apply $ lookup (map toLower (takeExtension f)) extractors 493 | where 494 | apply (Just types) = findExtractors (acc ++ types) (dropExtension f) 495 | apply Nothing = (f, acc) 496 | 497 | performExtract :: String -- command to execute 498 | -> [String] -- command arguments 499 | -> B.ByteString -- standard input 500 | -> (B.ByteString -> IO ExtractionResult) 501 | -- function to process output 502 | -> IO ExtractionResult 503 | performExtract cmd args ds fn = do 504 | (exit, out, err) <- bReadProcessWithExitCode cmd args ds 505 | if exit == ExitSuccess 506 | then fn out 507 | else return (ArchiveError err, noCleanup) 508 | 509 | examineContents :: FilePath -> Bool -> IO ExtractionResult 510 | examineContents dir cleanup = do 511 | -- Examine the contents of a populated directory 512 | -- if it's a single file, return the name and a cleanup action to 513 | -- remove temp 514 | -- if it's a single directory, recurse this step 515 | -- if it's many files and/or directories, return the name of the temp 516 | -- directory, and no cleanup action 517 | canon <- canonicalizePath dir 518 | contents <- getDirectoryContents canon 519 | case delete "." $ delete ".." contents of 520 | [] -> return (ArchiveError "Empty archive", removeDirectoryRecursive canon) 521 | [x] -> do 522 | let path = canon </> x 523 | isDir <- doesDirectoryExist path 524 | if isDir 525 | then do (x,m) <- examineContents path False 526 | return (x, do when cleanup $ removeDirectoryRecursive canon; m) 527 | else do x <- extract path False 528 | return (x, when cleanup $ removeDirectoryRecursive canon) 529 | 530 | _ -> return (DirectoryName canon, noCleanup) 531 | 532 | 533 | extractByTemp :: B.ByteString -- output to write to temp 534 | -> String -- the temporary file extension 535 | -> (Extraction -> IO ExtractionResult) 536 | -- function to handle the new temp file 537 | -> IO ExtractionResult 538 | extractByTemp ds ext fn = do 539 | dir <- workingDirectory 540 | (path, handle) <- openBinaryTempFile dir ("file" ++ ext) 541 | loud <- isLoud 542 | when loud $ putStrLn $ "> " ++ path 543 | B.hPut handle ds 544 | hFlush handle 545 | (x,m) <- fn (FileName path) 546 | hClose handle 547 | return (x, do removeFile path; m) 548 | 549 | createTempDirectory :: IO FilePath 550 | createTempDirectory = do 551 | dir <- workingDirectory 552 | (path, handle) <- openBinaryTempFile dir "dir.tmp" 553 | hClose handle 554 | removeFile path 555 | createDirectory path 556 | return path 557 | 558 | workingDirectory :: IO FilePath 559 | workingDirectory = do 560 | sysp <- getOption sysTemp 561 | if sysp 562 | then getTemporaryDirectory 563 | else do dir <- getOption temp 564 | if null dir 565 | then return "." 566 | else return dir 567 | 568 | removeFilePath :: FilePath -> IO () 569 | removeFilePath path = do 570 | fexists <- doesFileExist path 571 | dexists <- doesDirectoryExist path 572 | when (fexists || dexists) $ 573 | if fexists 574 | then removeFile path 575 | else removeDirectoryRecursive path 576 | 577 | -- The following function was copied from System.Process, because I needed a 578 | -- variant which operates on lazy ByteStrings. The regular version attempts 579 | -- to decode Unicode in the binary output from the decompressor. 
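-- Two details of the variant below are worth noting. First, stdout is put
-- into binary mode and read as a lazy ByteString, so compressed bytes pass
-- through undisturbed. Second, each output pipe is drained by its own
-- forked thread, which signals an MVar once the stream has been forced to
-- the end; draining stdout and stderr concurrently avoids the classic
-- deadlock where the child blocks writing one pipe while the parent blocks
-- reading the other. A typical call, mirroring performExtract above (with
-- `bytes' standing in for whatever input stream is on hand):
--
--   (exit, out, err) <- bReadProcessWithExitCode "gzip" ["-qdc"] bytes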
580 | 581 | bReadProcessWithExitCode 582 | :: FilePath -- ^ command to run 583 | -> [String] -- ^ any arguments 584 | -> B.ByteString -- ^ standard input 585 | -> IO (ExitCode,B.ByteString,String) -- ^ exitcode, stdout, stderr 586 | bReadProcessWithExitCode cmd args input = do 587 | (Just inh, Just outh, Just errh, pid) <- 588 | createProcess (proc cmd args){ std_in = CreatePipe 589 | , std_out = CreatePipe 590 | , std_err = CreatePipe } 591 | 592 | outMVar <- newEmptyMVar 593 | 594 | -- fork off a thread to start consuming stdout 595 | hSetBinaryMode outh True 596 | out <- B.hGetContents outh 597 | _ <- forkIO $ C.evaluate (B.length out) >> putMVar outMVar () 598 | 599 | -- fork off a thread to start consuming stderr 600 | hSetBinaryMode errh False 601 | err <- hGetContents errh 602 | _ <- forkIO $ C.evaluate (length err) >> putMVar outMVar () 603 | 604 | -- now write and flush any input 605 | loud <- isLoud 606 | if loud 607 | then if B.null input 608 | then putStrLn $ "! " ++ cmd ++ " " ++ unwords args 609 | else do putStrLn $ "| " ++ cmd ++ " " ++ unwords args 610 | B.hPutStr inh input; hFlush inh 611 | else unless (B.null input) $ do B.hPutStr inh input; hFlush inh 612 | hClose inh -- done with stdin 613 | 614 | -- wait on the output 615 | takeMVar outMVar 616 | takeMVar outMVar 617 | hClose outh 618 | hClose errh 619 | 620 | -- wait on the process 621 | ex <- waitForProcess pid 622 | 623 | return (ex, out, err) 624 | 625 | -- Main.hs (una) ends here 626 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # una: Universal un-archiver 2 | 3 | Version 2.1.0, by John Wiegley 4 | 5 | This is a "universal", recursive unarchiver, written because I'm too lazy to 6 | remember all the extraction options for the large number of archive formats I 7 | deal with. 8 | 9 | Optional dependencies: 10 | 11 | - StuffIt Expander (free, expander-only version) 12 | - MacPorts: unarj, unrar, lha, p7zip, cabextract 13 | 14 | ## Usage 15 | 16 | una [OPTION] ARCHIVE... 17 | 18 | If no OPTION is specified, the default action is to extract the archive's 19 | contents into the current directory. 20 | 21 | Options: 22 | -h, --help show help 23 | -d, --delete delete the archive if successfully extracted 24 | -f, --force overwrite any existing file or directory 25 | 26 | This script is also smart about unarchiving: 27 | 28 | a) if all the contents of an archive would already extract into a single 29 | directory, do that; 30 | 31 | b) if the archive contains only one item, extract it into the current 32 | directory; 33 | 34 | c) otherwise, if the archive would dump multiple contents into the current 35 | directory, create a new directory based on the name of the archive, 36 | sans extension, and put everything there. 
37 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | (import ( 2 | fetchTarball { 3 | url = "https://github.com/edolstra/flake-compat/archive/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9.tar.gz"; 4 | sha256 = "1prd9b1xx8c0sfwnyzkspplh30m613j42l1k789s521f4kv4c2z2"; } 5 | ) { 6 | src = ./.; 7 | }).defaultNix 8 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "HTTP": { 4 | "flake": false, 5 | "locked": { 6 | "lastModified": 1451647621, 7 | "narHash": "sha256-oHIyw3x0iKBexEo49YeUDV1k74ZtyYKGR2gNJXXRxts=", 8 | "owner": "phadej", 9 | "repo": "HTTP", 10 | "rev": "9bc0996d412fef1787449d841277ef663ad9a915", 11 | "type": "github" 12 | }, 13 | "original": { 14 | "owner": "phadej", 15 | "repo": "HTTP", 16 | "type": "github" 17 | } 18 | }, 19 | "cabal-32": { 20 | "flake": false, 21 | "locked": { 22 | "lastModified": 1603716527, 23 | "narHash": "sha256-X0TFfdD4KZpwl0Zr6x+PLxUt/VyKQfX7ylXHdmZIL+w=", 24 | "owner": "haskell", 25 | "repo": "cabal", 26 | "rev": "48bf10787e27364730dd37a42b603cee8d6af7ee", 27 | "type": "github" 28 | }, 29 | "original": { 30 | "owner": "haskell", 31 | "ref": "3.2", 32 | "repo": "cabal", 33 | "type": "github" 34 | } 35 | }, 36 | "cabal-34": { 37 | "flake": false, 38 | "locked": { 39 | "lastModified": 1645834128, 40 | "narHash": "sha256-wG3d+dOt14z8+ydz4SL7pwGfe7SiimxcD/LOuPCV6xM=", 41 | "owner": "haskell", 42 | "repo": "cabal", 43 | "rev": "5ff598c67f53f7c4f48e31d722ba37172230c462", 44 | "type": "github" 45 | }, 46 | "original": { 47 | "owner": "haskell", 48 | "ref": "3.4", 49 | "repo": "cabal", 50 | "type": "github" 51 | } 52 | }, 53 | "cabal-36": { 54 | "flake": false, 55 | "locked": { 56 | "lastModified": 1669081697, 57 | "narHash": "sha256-I5or+V7LZvMxfbYgZATU4awzkicBwwok4mVoje+sGmU=", 58 | "owner": "haskell", 59 | "repo": "cabal", 60 | "rev": "8fd619e33d34924a94e691c5fea2c42f0fc7f144", 61 | "type": "github" 62 | }, 63 | "original": { 64 | "owner": "haskell", 65 | "ref": "3.6", 66 | "repo": "cabal", 67 | "type": "github" 68 | } 69 | }, 70 | "cardano-shell": { 71 | "flake": false, 72 | "locked": { 73 | "lastModified": 1608537748, 74 | "narHash": "sha256-PulY1GfiMgKVnBci3ex4ptk2UNYMXqGjJOxcPy2KYT4=", 75 | "owner": "input-output-hk", 76 | "repo": "cardano-shell", 77 | "rev": "9392c75087cb9a3d453998f4230930dea3a95725", 78 | "type": "github" 79 | }, 80 | "original": { 81 | "owner": "input-output-hk", 82 | "repo": "cardano-shell", 83 | "type": "github" 84 | } 85 | }, 86 | "flake-compat": { 87 | "flake": false, 88 | "locked": { 89 | "lastModified": 1672831974, 90 | "narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=", 91 | "owner": "input-output-hk", 92 | "repo": "flake-compat", 93 | "rev": "45f2638735f8cdc40fe302742b79f248d23eb368", 94 | "type": "github" 95 | }, 96 | "original": { 97 | "owner": "input-output-hk", 98 | "ref": "hkm/gitlab-fix", 99 | "repo": "flake-compat", 100 | "type": "github" 101 | } 102 | }, 103 | "flake-utils": { 104 | "inputs": { 105 | "systems": "systems" 106 | }, 107 | "locked": { 108 | "lastModified": 1731533236, 109 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 110 | "owner": "numtide", 111 | "repo": "flake-utils", 112 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 113 | "type": "github" 114 | }, 115 | "original": { 116 | "owner": "numtide", 
117 | "repo": "flake-utils", 118 | "type": "github" 119 | } 120 | }, 121 | "ghc-8.6.5-iohk": { 122 | "flake": false, 123 | "locked": { 124 | "lastModified": 1600920045, 125 | "narHash": "sha256-DO6kxJz248djebZLpSzTGD6s8WRpNI9BTwUeOf5RwY8=", 126 | "owner": "input-output-hk", 127 | "repo": "ghc", 128 | "rev": "95713a6ecce4551240da7c96b6176f980af75cae", 129 | "type": "github" 130 | }, 131 | "original": { 132 | "owner": "input-output-hk", 133 | "ref": "release/8.6.5-iohk", 134 | "repo": "ghc", 135 | "type": "github" 136 | } 137 | }, 138 | "hackage": { 139 | "flake": false, 140 | "locked": { 141 | "lastModified": 1748219229, 142 | "narHash": "sha256-xiqhny0WsLuK1jCM2vsD0qzxfpRi6e6xU4BwCjhbgGc=", 143 | "owner": "input-output-hk", 144 | "repo": "hackage.nix", 145 | "rev": "7a4e218bd6c60cb13c8f07e46ad85badf3397c5b", 146 | "type": "github" 147 | }, 148 | "original": { 149 | "owner": "input-output-hk", 150 | "repo": "hackage.nix", 151 | "type": "github" 152 | } 153 | }, 154 | "hackage-for-stackage": { 155 | "flake": false, 156 | "locked": { 157 | "lastModified": 1748219218, 158 | "narHash": "sha256-kKe1cGUGkwp/6704BTKlH4yWTL0wmZugofJU20PcIkA=", 159 | "owner": "input-output-hk", 160 | "repo": "hackage.nix", 161 | "rev": "d3c929097030b8405f983de59ea243018d7cf877", 162 | "type": "github" 163 | }, 164 | "original": { 165 | "owner": "input-output-hk", 166 | "ref": "for-stackage", 167 | "repo": "hackage.nix", 168 | "type": "github" 169 | } 170 | }, 171 | "haskellNix": { 172 | "inputs": { 173 | "HTTP": "HTTP", 174 | "cabal-32": "cabal-32", 175 | "cabal-34": "cabal-34", 176 | "cabal-36": "cabal-36", 177 | "cardano-shell": "cardano-shell", 178 | "flake-compat": "flake-compat", 179 | "ghc-8.6.5-iohk": "ghc-8.6.5-iohk", 180 | "hackage": "hackage", 181 | "hackage-for-stackage": "hackage-for-stackage", 182 | "hls": "hls", 183 | "hls-1.10": "hls-1.10", 184 | "hls-2.0": "hls-2.0", 185 | "hls-2.10": "hls-2.10", 186 | "hls-2.2": "hls-2.2", 187 | "hls-2.3": "hls-2.3", 188 | "hls-2.4": "hls-2.4", 189 | "hls-2.5": "hls-2.5", 190 | "hls-2.6": "hls-2.6", 191 | "hls-2.7": "hls-2.7", 192 | "hls-2.8": "hls-2.8", 193 | "hls-2.9": "hls-2.9", 194 | "hpc-coveralls": "hpc-coveralls", 195 | "iserv-proxy": "iserv-proxy", 196 | "nixpkgs": [ 197 | "haskellNix", 198 | "nixpkgs-unstable" 199 | ], 200 | "nixpkgs-2305": "nixpkgs-2305", 201 | "nixpkgs-2311": "nixpkgs-2311", 202 | "nixpkgs-2405": "nixpkgs-2405", 203 | "nixpkgs-2411": "nixpkgs-2411", 204 | "nixpkgs-unstable": "nixpkgs-unstable", 205 | "old-ghc-nix": "old-ghc-nix", 206 | "stackage": "stackage" 207 | }, 208 | "locked": { 209 | "lastModified": 1748220732, 210 | "narHash": "sha256-Io2eq6g94/HCdtU8Xb4/qUawzFvBVNBmme7uOjQiH+o=", 211 | "owner": "input-output-hk", 212 | "repo": "haskell.nix", 213 | "rev": "339479e6413c1974395ca807f55511013ceb0ac6", 214 | "type": "github" 215 | }, 216 | "original": { 217 | "owner": "input-output-hk", 218 | "repo": "haskell.nix", 219 | "type": "github" 220 | } 221 | }, 222 | "hls": { 223 | "flake": false, 224 | "locked": { 225 | "lastModified": 1741604408, 226 | "narHash": "sha256-tuq3+Ip70yu89GswZ7DSINBpwRprnWnl6xDYnS4GOsc=", 227 | "owner": "haskell", 228 | "repo": "haskell-language-server", 229 | "rev": "682d6894c94087da5e566771f25311c47e145359", 230 | "type": "github" 231 | }, 232 | "original": { 233 | "owner": "haskell", 234 | "repo": "haskell-language-server", 235 | "type": "github" 236 | } 237 | }, 238 | "hls-1.10": { 239 | "flake": false, 240 | "locked": { 241 | "lastModified": 1680000865, 242 | "narHash": 
"sha256-rc7iiUAcrHxwRM/s0ErEsSPxOR3u8t7DvFeWlMycWgo=", 243 | "owner": "haskell", 244 | "repo": "haskell-language-server", 245 | "rev": "b08691db779f7a35ff322b71e72a12f6e3376fd9", 246 | "type": "github" 247 | }, 248 | "original": { 249 | "owner": "haskell", 250 | "ref": "1.10.0.0", 251 | "repo": "haskell-language-server", 252 | "type": "github" 253 | } 254 | }, 255 | "hls-2.0": { 256 | "flake": false, 257 | "locked": { 258 | "lastModified": 1687698105, 259 | "narHash": "sha256-OHXlgRzs/kuJH8q7Sxh507H+0Rb8b7VOiPAjcY9sM1k=", 260 | "owner": "haskell", 261 | "repo": "haskell-language-server", 262 | "rev": "783905f211ac63edf982dd1889c671653327e441", 263 | "type": "github" 264 | }, 265 | "original": { 266 | "owner": "haskell", 267 | "ref": "2.0.0.1", 268 | "repo": "haskell-language-server", 269 | "type": "github" 270 | } 271 | }, 272 | "hls-2.10": { 273 | "flake": false, 274 | "locked": { 275 | "lastModified": 1743069404, 276 | "narHash": "sha256-q4kDFyJDDeoGqfEtrZRx4iqMVEC2MOzCToWsFY+TOzY=", 277 | "owner": "haskell", 278 | "repo": "haskell-language-server", 279 | "rev": "2318c61db3a01e03700bd4b05665662929b7fe8b", 280 | "type": "github" 281 | }, 282 | "original": { 283 | "owner": "haskell", 284 | "ref": "2.10.0.0", 285 | "repo": "haskell-language-server", 286 | "type": "github" 287 | } 288 | }, 289 | "hls-2.2": { 290 | "flake": false, 291 | "locked": { 292 | "lastModified": 1693064058, 293 | "narHash": "sha256-8DGIyz5GjuCFmohY6Fa79hHA/p1iIqubfJUTGQElbNk=", 294 | "owner": "haskell", 295 | "repo": "haskell-language-server", 296 | "rev": "b30f4b6cf5822f3112c35d14a0cba51f3fe23b85", 297 | "type": "github" 298 | }, 299 | "original": { 300 | "owner": "haskell", 301 | "ref": "2.2.0.0", 302 | "repo": "haskell-language-server", 303 | "type": "github" 304 | } 305 | }, 306 | "hls-2.3": { 307 | "flake": false, 308 | "locked": { 309 | "lastModified": 1695910642, 310 | "narHash": "sha256-tR58doOs3DncFehHwCLczJgntyG/zlsSd7DgDgMPOkI=", 311 | "owner": "haskell", 312 | "repo": "haskell-language-server", 313 | "rev": "458ccdb55c9ea22cd5d13ec3051aaefb295321be", 314 | "type": "github" 315 | }, 316 | "original": { 317 | "owner": "haskell", 318 | "ref": "2.3.0.0", 319 | "repo": "haskell-language-server", 320 | "type": "github" 321 | } 322 | }, 323 | "hls-2.4": { 324 | "flake": false, 325 | "locked": { 326 | "lastModified": 1699862708, 327 | "narHash": "sha256-YHXSkdz53zd0fYGIYOgLt6HrA0eaRJi9mXVqDgmvrjk=", 328 | "owner": "haskell", 329 | "repo": "haskell-language-server", 330 | "rev": "54507ef7e85fa8e9d0eb9a669832a3287ffccd57", 331 | "type": "github" 332 | }, 333 | "original": { 334 | "owner": "haskell", 335 | "ref": "2.4.0.1", 336 | "repo": "haskell-language-server", 337 | "type": "github" 338 | } 339 | }, 340 | "hls-2.5": { 341 | "flake": false, 342 | "locked": { 343 | "lastModified": 1701080174, 344 | "narHash": "sha256-fyiR9TaHGJIIR0UmcCb73Xv9TJq3ht2ioxQ2mT7kVdc=", 345 | "owner": "haskell", 346 | "repo": "haskell-language-server", 347 | "rev": "27f8c3d3892e38edaef5bea3870161815c4d014c", 348 | "type": "github" 349 | }, 350 | "original": { 351 | "owner": "haskell", 352 | "ref": "2.5.0.0", 353 | "repo": "haskell-language-server", 354 | "type": "github" 355 | } 356 | }, 357 | "hls-2.6": { 358 | "flake": false, 359 | "locked": { 360 | "lastModified": 1705325287, 361 | "narHash": "sha256-+P87oLdlPyMw8Mgoul7HMWdEvWP/fNlo8jyNtwME8E8=", 362 | "owner": "haskell", 363 | "repo": "haskell-language-server", 364 | "rev": "6e0b342fa0327e628610f2711f8c3e4eaaa08b1e", 365 | "type": "github" 366 | }, 367 | "original": { 368 | "owner": 
"haskell", 369 | "ref": "2.6.0.0", 370 | "repo": "haskell-language-server", 371 | "type": "github" 372 | } 373 | }, 374 | "hls-2.7": { 375 | "flake": false, 376 | "locked": { 377 | "lastModified": 1708965829, 378 | "narHash": "sha256-LfJ+TBcBFq/XKoiNI7pc4VoHg4WmuzsFxYJ3Fu+Jf+M=", 379 | "owner": "haskell", 380 | "repo": "haskell-language-server", 381 | "rev": "50322b0a4aefb27adc5ec42f5055aaa8f8e38001", 382 | "type": "github" 383 | }, 384 | "original": { 385 | "owner": "haskell", 386 | "ref": "2.7.0.0", 387 | "repo": "haskell-language-server", 388 | "type": "github" 389 | } 390 | }, 391 | "hls-2.8": { 392 | "flake": false, 393 | "locked": { 394 | "lastModified": 1715153580, 395 | "narHash": "sha256-Vi/iUt2pWyUJlo9VrYgTcbRviWE0cFO6rmGi9rmALw0=", 396 | "owner": "haskell", 397 | "repo": "haskell-language-server", 398 | "rev": "dd1be1beb16700de59e0d6801957290bcf956a0a", 399 | "type": "github" 400 | }, 401 | "original": { 402 | "owner": "haskell", 403 | "ref": "2.8.0.0", 404 | "repo": "haskell-language-server", 405 | "type": "github" 406 | } 407 | }, 408 | "hls-2.9": { 409 | "flake": false, 410 | "locked": { 411 | "lastModified": 1719993701, 412 | "narHash": "sha256-wy348++MiMm/xwtI9M3vVpqj2qfGgnDcZIGXw8sF1sA=", 413 | "owner": "haskell", 414 | "repo": "haskell-language-server", 415 | "rev": "90319a7e62ab93ab65a95f8f2bcf537e34dae76a", 416 | "type": "github" 417 | }, 418 | "original": { 419 | "owner": "haskell", 420 | "ref": "2.9.0.1", 421 | "repo": "haskell-language-server", 422 | "type": "github" 423 | } 424 | }, 425 | "hpc-coveralls": { 426 | "flake": false, 427 | "locked": { 428 | "lastModified": 1607498076, 429 | "narHash": "sha256-8uqsEtivphgZWYeUo5RDUhp6bO9j2vaaProQxHBltQk=", 430 | "owner": "sevanspowell", 431 | "repo": "hpc-coveralls", 432 | "rev": "14df0f7d229f4cd2e79f8eabb1a740097fdfa430", 433 | "type": "github" 434 | }, 435 | "original": { 436 | "owner": "sevanspowell", 437 | "repo": "hpc-coveralls", 438 | "type": "github" 439 | } 440 | }, 441 | "iserv-proxy": { 442 | "flake": false, 443 | "locked": { 444 | "lastModified": 1747047742, 445 | "narHash": "sha256-PCDULyZSIPdDdF8Lanbcy+Dl6AJ5z6H2ng3sRsv+gwc=", 446 | "owner": "stable-haskell", 447 | "repo": "iserv-proxy", 448 | "rev": "dea34de4bde325aca22472c18d659bee7800b477", 449 | "type": "github" 450 | }, 451 | "original": { 452 | "owner": "stable-haskell", 453 | "ref": "iserv-syms", 454 | "repo": "iserv-proxy", 455 | "type": "github" 456 | } 457 | }, 458 | "nixpkgs-2305": { 459 | "locked": { 460 | "lastModified": 1705033721, 461 | "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=", 462 | "owner": "NixOS", 463 | "repo": "nixpkgs", 464 | "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea", 465 | "type": "github" 466 | }, 467 | "original": { 468 | "owner": "NixOS", 469 | "ref": "nixpkgs-23.05-darwin", 470 | "repo": "nixpkgs", 471 | "type": "github" 472 | } 473 | }, 474 | "nixpkgs-2311": { 475 | "locked": { 476 | "lastModified": 1719957072, 477 | "narHash": "sha256-gvFhEf5nszouwLAkT9nWsDzocUTqLWHuL++dvNjMp9I=", 478 | "owner": "NixOS", 479 | "repo": "nixpkgs", 480 | "rev": "7144d6241f02d171d25fba3edeaf15e0f2592105", 481 | "type": "github" 482 | }, 483 | "original": { 484 | "owner": "NixOS", 485 | "ref": "nixpkgs-23.11-darwin", 486 | "repo": "nixpkgs", 487 | "type": "github" 488 | } 489 | }, 490 | "nixpkgs-2405": { 491 | "locked": { 492 | "lastModified": 1735564410, 493 | "narHash": "sha256-HB/FA0+1gpSs8+/boEavrGJH+Eq08/R2wWNph1sM1Dg=", 494 | "owner": "NixOS", 495 | "repo": "nixpkgs", 496 | "rev": 
"1e7a8f391f1a490460760065fa0630b5520f9cf8", 497 | "type": "github" 498 | }, 499 | "original": { 500 | "owner": "NixOS", 501 | "ref": "nixpkgs-24.05-darwin", 502 | "repo": "nixpkgs", 503 | "type": "github" 504 | } 505 | }, 506 | "nixpkgs-2411": { 507 | "locked": { 508 | "lastModified": 1746566971, 509 | "narHash": "sha256-I40weT0FZWth1IEjgR5a0zC9LLyrPwTC0DAQcejtTJE=", 510 | "owner": "NixOS", 511 | "repo": "nixpkgs", 512 | "rev": "209c5b3b0f5cf5b5a7e12ddea59bf19699f97e75", 513 | "type": "github" 514 | }, 515 | "original": { 516 | "owner": "NixOS", 517 | "ref": "nixpkgs-24.11-darwin", 518 | "repo": "nixpkgs", 519 | "type": "github" 520 | } 521 | }, 522 | "nixpkgs-unstable": { 523 | "locked": { 524 | "lastModified": 1746576598, 525 | "narHash": "sha256-FshoQvr6Aor5SnORVvh/ZdJ1Sa2U4ZrIMwKBX5k2wu0=", 526 | "owner": "NixOS", 527 | "repo": "nixpkgs", 528 | "rev": "b3582c75c7f21ce0b429898980eddbbf05c68e55", 529 | "type": "github" 530 | }, 531 | "original": { 532 | "owner": "NixOS", 533 | "ref": "nixpkgs-unstable", 534 | "repo": "nixpkgs", 535 | "type": "github" 536 | } 537 | }, 538 | "old-ghc-nix": { 539 | "flake": false, 540 | "locked": { 541 | "lastModified": 1631092763, 542 | "narHash": "sha256-sIKgO+z7tj4lw3u6oBZxqIhDrzSkvpHtv0Kki+lh9Fg=", 543 | "owner": "angerman", 544 | "repo": "old-ghc-nix", 545 | "rev": "af48a7a7353e418119b6dfe3cd1463a657f342b8", 546 | "type": "github" 547 | }, 548 | "original": { 549 | "owner": "angerman", 550 | "ref": "master", 551 | "repo": "old-ghc-nix", 552 | "type": "github" 553 | } 554 | }, 555 | "root": { 556 | "inputs": { 557 | "flake-utils": "flake-utils", 558 | "haskellNix": "haskellNix", 559 | "nixpkgs": [ 560 | "haskellNix", 561 | "nixpkgs-unstable" 562 | ] 563 | } 564 | }, 565 | "stackage": { 566 | "flake": false, 567 | "locked": { 568 | "lastModified": 1748218423, 569 | "narHash": "sha256-Kxq6dht95EwFzqxqM1SlGuzxgvjyrZSHcnAIMz4imV4=", 570 | "owner": "input-output-hk", 571 | "repo": "stackage.nix", 572 | "rev": "9e099770ef4546bb9534db7cf08d4813ece553db", 573 | "type": "github" 574 | }, 575 | "original": { 576 | "owner": "input-output-hk", 577 | "repo": "stackage.nix", 578 | "type": "github" 579 | } 580 | }, 581 | "systems": { 582 | "locked": { 583 | "lastModified": 1681028828, 584 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 585 | "owner": "nix-systems", 586 | "repo": "default", 587 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 588 | "type": "github" 589 | }, 590 | "original": { 591 | "owner": "nix-systems", 592 | "repo": "default", 593 | "type": "github" 594 | } 595 | } 596 | }, 597 | "root": "root", 598 | "version": 7 599 | } 600 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Universal unarchiving software"; 3 | 4 | inputs = { 5 | nixpkgs.follows = "haskellNix/nixpkgs-unstable"; 6 | haskellNix.url = "github:input-output-hk/haskell.nix"; 7 | flake-utils.url = "github:numtide/flake-utils"; 8 | }; 9 | 10 | outputs = { self, nixpkgs, flake-utils, haskellNix }: 11 | flake-utils.lib.eachDefaultSystem (system: 12 | let 13 | pkgs = import nixpkgs { 14 | inherit system overlays; 15 | inherit (haskellNix) config; 16 | }; 17 | flake = pkgs.una.flake { 18 | }; 19 | overlays = [ haskellNix.overlay 20 | (final: prev: { 21 | una = 22 | final.haskell-nix.project' { 23 | src = ./.; 24 | supportHpack = true; 25 | compiler-nix-name = "ghc910"; 26 | shell.tools = { 27 | cabal = {}; 28 | 
haskell-language-server = {}; 29 | # hlint = {}; 30 | }; 31 | shell.buildInputs = with pkgs; [ 32 | pkg-config 33 | ]; 34 | }; 35 | }) 36 | ]; 37 | in { 38 | packages.default = flake.packages."una:exe:una"; 39 | devShell = flake.devShell // { 40 | withHoogle = true; 41 | }; 42 | }); 43 | } 44 | -------------------------------------------------------------------------------- /una.cabal: -------------------------------------------------------------------------------- 1 | Name: una 2 | 3 | Version: 2.1.0 4 | Synopsis: Universal un-archiver utility 5 | 6 | Description: A simple universal unarchiving utility. Just point it at any 7 | archive or compressed file, and it spits out a single file or 8 | directory in the current directory with its contents. Use -d to 9 | delete the original archive on success. Use -f to overwrite any 10 | existing file or directory which might be in the way. 11 | 12 | Homepage: https://github.com/jwiegley/una 13 | License: BSD3 14 | License-file: LICENSE 15 | Author: John Wiegley 16 | Maintainer: John Wiegley 17 | Category: Utils 18 | Build-type: Simple 19 | Cabal-version: >= 1.8 20 | 21 | Extra-Source-Files: README.md 22 | 23 | Executable una 24 | Main-is: Main.hs 25 | Ghc-options: -threaded 26 | 27 | Build-depends: base >= 4 && < 5, cmdargs, io-storage, directory, process, 28 | filepath, bytestring 29 | 30 | Source-repository head 31 | type: git 32 | location: https://github.com/jwiegley/una 33 | --------------------------------------------------------------------------------