├── datafiles ├── templates │ ├── hackageErrorPage.txt.st │ ├── hackageCssTheme.st │ ├── hackageErrorPage.html.st │ ├── Html │ │ ├── distro-monitor.html.st │ │ ├── maintain-candidate.html.st │ │ └── maintain.html.st │ ├── hackagePageHeader.st │ ├── UserSignupReset │ │ ├── ResetEmailSent.html.st │ │ ├── SignupEmailSent.html.st │ │ ├── SignupConfirmation.email.st │ │ ├── ResetConfirmation.email.st │ │ ├── ResetConfirm.html.st │ │ ├── SignupConfirm.html.st │ │ ├── ResetRequest.html.st │ │ └── SignupRequest.html.st │ ├── AdminFrontend │ │ ├── accounts.html.st │ │ ├── resets.html.st │ │ ├── legacy.html.st │ │ ├── signups.html.st │ │ └── admin.html.st │ ├── Search │ │ └── opensearch.xml.st │ ├── accounts.html.st │ ├── EditCabalFile │ │ ├── cabalFilePublished.html.st │ │ └── cabalFileEditPage.html.st │ ├── LegacyPasswds │ │ ├── htpasswd-upgrade-success.html.st │ │ └── htpasswd-upgrade.html.st │ ├── index.html.st │ └── upload.html.st └── static │ ├── favicon.ico │ ├── hslogo-16.png │ ├── cabal-tiny.png │ └── built-with-cabal.png ├── .authorspellings ├── .gitignore ├── rundocs.sh ├── .travis.yml ├── Dockerfile ├── tests ├── TarCheck.hs ├── Util.hs ├── Package.hs ├── Run.hs ├── MailUtils.hs └── CreateUserTest.hs ├── Distribution ├── Server │ ├── Util │ │ ├── GZip.hs │ │ ├── Merge.hs │ │ ├── Parse.hs │ │ ├── SigTerm.hs │ │ ├── TarIndex.hs │ │ ├── Happstack.hs │ │ ├── ContentType.hs │ │ ├── Histogram.hs │ │ ├── TextSearch.hs │ │ ├── Index.hs │ │ ├── NameIndex.hs │ │ └── CountingMap.hs │ ├── Features │ │ ├── Search │ │ │ ├── DocFeatVals.hs │ │ │ ├── DocTermIds.hs │ │ │ ├── TermBag.hs │ │ │ ├── ExtractDescriptionTerms.hs │ │ │ └── ExtractNameTerms.hs │ │ ├── Crash.hs │ │ ├── PackageCandidates │ │ │ ├── Types.hs │ │ │ ├── State.hs │ │ │ └── Backup.hs │ │ ├── Tags │ │ │ └── Backup.hs │ │ ├── HaskellPlatform │ │ │ └── State.hs │ │ ├── TarIndexCache │ │ │ └── State.hs │ │ ├── DownloadCount │ │ │ └── Backup.hs │ │ ├── Distro │ │ │ └── Types.hs │ │ ├── Documentation │ │ │ └── State.hs │ │ ├── Upload │ │ │ └── Backup.hs │ │ └── BuildReports │ │ │ └── State.hs │ ├── Framework │ │ ├── AuthTypes.hs │ │ ├── Logging.hs │ │ ├── MemState.hs │ │ ├── Hook.hs │ │ ├── ServerEnv.hs │ │ ├── AuthCrypt.hs │ │ └── Error.hs │ ├── Pages │ │ ├── Util.hs │ │ ├── Package │ │ │ ├── HaddockTypes.hs │ │ │ └── HaddockParse.y │ │ ├── Group.hs │ │ ├── Template.hs │ │ ├── Recent.hs │ │ ├── BuildReports.hs │ │ └── Distributions.hs │ ├── Packages │ │ ├── ChangeLog.hs │ │ ├── ModuleForest.hs │ │ └── Index.hs │ ├── Framework.hs │ └── Users │ │ ├── Types.hs │ │ └── Group.hs └── Client │ ├── HtPasswdDb.hs │ ├── PkgIndex.hs │ ├── DistroMap.hs │ ├── TagsFile.hs │ ├── Cron.hs │ ├── UserAddressesDb.hs │ ├── UploadLog.hs │ └── ParseApacheLogs.hs ├── run-docker.sh ├── docs └── run-doc-builder.sh.example ├── LICENSE ├── old-hackage-download.sh ├── old-hackage-import.sh ├── Data └── StringTable.hs └── README.md /datafiles/templates/hackageErrorPage.txt.st: -------------------------------------------------------------------------------- 1 | Error: $errorTitle$ 2 | 3 | $errorMessage$ 4 | 5 | -------------------------------------------------------------------------------- /.authorspellings: -------------------------------------------------------------------------------- 1 | Antoine Latter 2 | Matthew Gruen 3 | 4 | -------------------------------------------------------------------------------- /datafiles/static/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/alpmestan/hackage-server/master/datafiles/static/favicon.ico -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build-cache 2 | mirror-cache 3 | state 4 | dist 5 | tags 6 | *.swp 7 | .cabal-sandbox 8 | cabal.sandbox.config -------------------------------------------------------------------------------- /datafiles/static/hslogo-16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpmestan/hackage-server/master/datafiles/static/hslogo-16.png -------------------------------------------------------------------------------- /datafiles/static/cabal-tiny.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpmestan/hackage-server/master/datafiles/static/cabal-tiny.png -------------------------------------------------------------------------------- /datafiles/static/built-with-cabal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpmestan/hackage-server/master/datafiles/static/built-with-cabal.png -------------------------------------------------------------------------------- /rundocs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | while true 4 | do 5 | dist/build/hackage-build/hackage-build build --run-time=25 6 | sleep 300 7 | done 8 | -------------------------------------------------------------------------------- /datafiles/templates/hackageCssTheme.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /datafiles/templates/hackageErrorPage.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: $errorTitle$ 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

$errorTitle$

13 | 14 | $errorMessage$ 15 | 16 |
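The $errorTitle$ and $errorMessage$ placeholders above are StringTemplate attributes. As a rough, self-contained sketch (not part of this repository; it assumes the HStringTemplate package, whose $attribute$ syntax these .st files follow, and the helper name errorPageText is invented for illustration), filling in the plain-text error template shown earlier could look like this:

```haskell
-- Hypothetical illustration only: render the plain-text error template by
-- substituting its $errorTitle$ and $errorMessage$ attributes.
import Text.StringTemplate (StringTemplate, newSTMP, render, setAttribute)

errorPageText :: String -> String -> String
errorPageText title message =
      render
    . setAttribute "errorMessage" message
    . setAttribute "errorTitle"   title
    $ (newSTMP "Error: $errorTitle$\n\n$errorMessage$\n" :: StringTemplate String)

-- e.g. putStrLn (errorPageText "Not Found" "No such package in the index")
```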
17 | 18 | 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: haskell 2 | 3 | notifications: 4 | email: false 5 | irc: "chat.freenode.net#hackage" 6 | 7 | install: 8 | - sudo apt-get install libicu-dev 9 | - cabal install --only-dependencies --enable-tests --disable-optimization --ghc-options=-w 10 | 11 | script: 12 | - cabal configure --enable-tests --disable-optimization && cabal build 13 | - travis_retry cabal test 14 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | from zsol/haskell-platform-2013.2.0.0 2 | 3 | env HOME /home/haskell 4 | 5 | # dependencies 6 | run sudo apt-get install unzip libicu48 libicu-dev 7 | run cabal update 8 | run curl -LO https://github.com/haskell/hackage-server/archive/master.zip 9 | 10 | run unzip master.zip 11 | 12 | workdir /home/haskell/hackage-server-master 13 | 14 | run cabal install --only-dependencies 15 | run cabal configure && cabal build 16 | 17 | cmd echo "Binaries are in ./dist/build/hackage-*" 18 | -------------------------------------------------------------------------------- /datafiles/templates/Html/distro-monitor.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Tarballs for $pkgname$ 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Tarballs for $pkgname$

13 | 14 |

The following versions of $pkgname$ exist: 15 | 16 |

19 | 20 |

21 | 22 |
23 | 24 | -------------------------------------------------------------------------------- /datafiles/templates/hackagePageHeader.st: -------------------------------------------------------------------------------- 1 | 12 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/ResetEmailSent.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Account recovery 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Account recovery email sent

13 | 14 |

An email has been sent to $useremail$ 15 | 16 |

The email will contain a link to a page where 17 | you can set a new password. 18 | 19 |

Note that these activation links do eventually expire, 20 | so don't leave it too long! 21 | 22 |

23 | 24 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/SignupEmailSent.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Register a new account 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Confirmation email sent

13 | 14 |

An email has been sent to $useremail$ 15 | 16 |

The email will contain a link to a page where 17 | you can set your password and activate your account. 18 | 19 |

Note that these activation links do eventually expire, 20 | so don't leave it too long! 21 | 22 |

23 | 24 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/SignupConfirmation.email.st: -------------------------------------------------------------------------------- 1 | Dear $realname$, 2 | 3 | We received a request to create a Hackage account 4 | for you. To create a Hackage account, please follow 5 | this link: 6 | 7 | $confirmlink$ 8 | 9 | If you were not expecting this email, our apologies, 10 | please ignore it. 11 | 12 | From, 13 | The Hackage website at $serverhost$ 14 | (and on behalf of the site administrators) 15 | ______________________________________________ 16 | Please do not reply to this email. This email 17 | address is used only for sending email so you 18 | will not receive a response. 19 | 20 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/ResetConfirmation.email.st: -------------------------------------------------------------------------------- 1 | Dear $realname$, 2 | 3 | We received a lost password request for your Hackage 4 | user account. To set a new password, please follow 5 | this link: 6 | 7 | $confirmlink$ 8 | 9 | If you were not expecting this email, our apologies, 10 | please ignore it. 11 | 12 | From, 13 | The Hackage website at $serverhost$ 14 | (and on behalf of the site administrators) 15 | ______________________________________________ 16 | Please do not reply to this email. This email 17 | address is used only for sending email so you 18 | will not receive a response. 19 | 20 | -------------------------------------------------------------------------------- /tests/TarCheck.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Server.Upload (unpackPackage) 2 | import qualified Data.ByteString.Lazy as BS 3 | 4 | import System.Environment (getArgs) 5 | import System.FilePath (takeFileName) 6 | 7 | main = do 8 | files <- getArgs 9 | sequence_ 10 | [ do tar <- BS.readFile file 11 | case unpackPackage (takeFileName file) tar of 12 | Left err -> do 13 | putStrLn $ "FAILED! 
" ++ takeFileName file 14 | putStr $ err ++ "\n\n" 15 | Right (pkg, []) -> return () 16 | Right (pkg, warnings) -> do 17 | putStrLn $ "Warnings " ++ takeFileName file 18 | putStrLn (unlines warnings) 19 | | file <- files ] 20 | -------------------------------------------------------------------------------- /Distribution/Server/Util/GZip.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Util.GZip ( 2 | decompressNamed 3 | ) where 4 | 5 | import qualified Codec.Compression.GZip as GZip 6 | import Control.Exception 7 | import Data.ByteString.Lazy.Internal 8 | 9 | decompressNamed :: String -> ByteString -> ByteString 10 | decompressNamed n bs = 11 | mapExceptionRecursive mapError $ GZip.decompress bs 12 | where 13 | mapError (ErrorCall str) = ErrorCall $ str ++ " in " ++ show n 14 | 15 | mapExceptionRecursive :: (Exception e1, Exception e2) => (e1 -> e2) -> ByteString -> ByteString 16 | mapExceptionRecursive f bs = 17 | case mapException f bs of 18 | Empty -> Empty 19 | Chunk b bs' -> Chunk b (mapExceptionRecursive f bs') 20 | -------------------------------------------------------------------------------- /datafiles/templates/AdminFrontend/accounts.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: browse user accounts 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Admin front-end

13 | 14 |

Browse user accounts

15 | 16 |

All user accounts, including disabled and deleted.

17 | 18 |
    19 | $accounts:{account| 20 |
  • 21 | $if(account.active)$ 22 | $account.name$ 23 | $else$ 24 | $account.name$ (deleted) 25 | $endif$ 26 |
  • 27 | }$ 28 |
29 | 30 |
31 | 32 | 33 | -------------------------------------------------------------------------------- /datafiles/templates/AdminFrontend/resets.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: browse reset requests 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Admin front-end

13 | 14 |

Browse password reset requests

15 | 16 | All currently active password reset requests (i.e. excluding expired and completed). 17 | 18 | 19 | 20 | 21 | 22 | 23 | $resets:{reset| 24 | 25 | 26 | 27 | 28 | }$ 29 |
User nameTimestamp
$reset.account.name$$reset.timestamp$
30 | 31 |
32 | 33 | 34 | -------------------------------------------------------------------------------- /datafiles/templates/Search/opensearch.xml.st: -------------------------------------------------------------------------------- 1 | 2 | 4 | Hackage 5 | Search for Haskell packages on Hackage 6 | UTF-8 7 | $serverhost$/favicon.ico 8 | 10 | 14 | $serverhost$/packages/search 15 | 16 | -------------------------------------------------------------------------------- /Distribution/Server/Util/Merge.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Util.Merge where 2 | 3 | import Data.Map 4 | 5 | 6 | data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b 7 | 8 | mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] 9 | mergeBy cmp = merge 10 | where 11 | merge [] ys = [ OnlyInRight y | y <- ys] 12 | merge xs [] = [ OnlyInLeft x | x <- xs] 13 | merge (x:xs) (y:ys) = 14 | case x `cmp` y of 15 | GT -> OnlyInRight y : merge (x:xs) ys 16 | EQ -> InBoth x y : merge xs ys 17 | LT -> OnlyInLeft x : merge xs (y:ys) 18 | 19 | mergeMaps :: Ord k => Map k a -> Map k b -> Map k (MergeResult a b) 20 | mergeMaps m1 m2 = unionWith (\(OnlyInLeft a) (OnlyInRight b) -> InBoth a b) (fmap OnlyInLeft m1) (fmap OnlyInRight m2) 21 | -------------------------------------------------------------------------------- /datafiles/templates/AdminFrontend/legacy.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: User accounts with legacy passwords 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Admin front-end

13 | 14 |

User accounts with legacy passwords

15 | 16 |

All user accounts with legacy "htpasswd" passwords set:

17 | 18 | $if(first(accounts))$ 19 |

    20 | $accounts:{account| 21 |
  • 22 | $if(account.active)$ 23 | $account.name$ 24 | $else$ 25 | $account.name$ (deleted) 26 | $endif$ 27 |
  • 28 | }$ 29 |
30 | $else$ 31 |

None.

32 | $endif$ 33 | 34 |
35 | 36 | 37 | -------------------------------------------------------------------------------- /datafiles/templates/AdminFrontend/signups.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: browse signup requests 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Admin front-end

13 | 14 |

Browse account signup requests

15 | 16 | All currently active account signup requests (i.e. excluding expired and completed). 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | $signups:{signup| 26 | 27 | 28 | 29 | 30 | 31 | 32 | }$ 33 |
User nameReal nameEmailTimestamp
$signup.username$$signup.realname$$signup.email$$signup.timestamp$
34 | 35 | 36 |
37 | 38 | 39 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Search/DocFeatVals.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving #-} 2 | module Distribution.Server.Features.Search.DocFeatVals ( 3 | DocFeatVals, 4 | featureValue, 5 | create, 6 | ) where 7 | 8 | import Distribution.Server.Features.Search.DocTermIds (vecIndexIx, vecCreateIx) 9 | import Distribution.Server.Framework.MemSize 10 | import Data.Vector (Vector) 11 | import Data.Ix (Ix) 12 | 13 | 14 | -- | Storage for the non-term feature values i a document. 15 | -- 16 | newtype DocFeatVals feature = DocFeatVals (Vector Float) 17 | deriving (Show, MemSize) 18 | 19 | featureValue :: (Ix feature, Bounded feature) => DocFeatVals feature -> feature -> Float 20 | featureValue (DocFeatVals featVec) = vecIndexIx featVec 21 | 22 | create :: (Ix feature, Bounded feature) => 23 | (feature -> Float) -> DocFeatVals feature 24 | create docFeatVals = 25 | DocFeatVals (vecCreateIx docFeatVals) 26 | 27 | -------------------------------------------------------------------------------- /run-docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | ADMIN_USER=admin 6 | ADMIN_PASS=admin 7 | BUILD_RUN_TIME=30 8 | BUILD_INTERVAL=5 9 | 10 | DOCKER_IMAGE=zsol/hackage-server 11 | 12 | server_id=$(sudo docker run -d -p 8080 ${DOCKER_IMAGE} sh -c "./dist/build/hackage-server/hackage-server init --static-dir=datafiles --admin=\"${ADMIN_USER}:${ADMIN_PASS}\" ; ./dist/build/hackage-server/hackage-server run --static-dir=datafiles") 13 | 14 | server_ip=$(sudo docker inspect $server_id | grep IPAddress | cut -d'"' -f4) 15 | server_local_port=$(sudo docker port $server_id 8080) 16 | 17 | sleep 2 # TODO: poll until web server comes up 18 | 19 | sudo docker run -d ${DOCKER_IMAGE} sh -c "echo -e \"${ADMIN_USER}\n${ADMIN_PASS}\" | ./dist/build/hackage-build/hackage-build init http://${server_ip}:8080; ./dist/build/hackage-build/hackage-build build --run-time=${BUILD_RUN_TIME} --interval=${BUILD_INTERVAL} --continuous" 20 | 21 | echo "You can access your local hackage at http://localhost:${server_local_port}" 22 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/ResetConfirm.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Account recovery 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Account recovery

13 | 14 |

Email confirmation done! 15 | 16 |

Now you can set a new password. 17 | 18 |

19 | 20 |
Your name:$realname$ 21 |
Login username:$username$ 22 |
Contact email address:$useremail$ 23 |
24 | 25 |
26 | 27 |
28 | 29 |

30 |

31 | 32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /datafiles/templates/accounts.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: User accounts 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

User accounts

13 |

Most of the functions of the Hackage web interface 14 | (including browsing and checking packages) are available to all. 15 | However, uploading packages requires a Hackage username and password. 16 |

17 | 18 |

So, you can register a new user account.

19 | 20 |

You can also peruse the list of users.

21 | 22 |

Passwords are not stored, just the digest.

23 | 24 |

If you forget your password you can reset it 25 | so long as you know your user login name and the email address you 26 | originally registered with. The system will send you an email with a 27 | link you can use to set a new password.

28 |
29 | 30 | -------------------------------------------------------------------------------- /docs/run-doc-builder.sh.example: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | PIDFILE=@PIDFILE@ 4 | EXECUTABLE=@HACKAGEBUILD@ 5 | DIRECTORY=@DIRECTORY@ 6 | A_WHILE="1 hour" 7 | 8 | @EXTRA@ 9 | 10 | if [ -e "$PIDFILE" ] 11 | then 12 | EXISTS=1 13 | touch --date="$A_WHILE ago" earlier 14 | if [ "$PIDFILE" -ot earlier ] 15 | then 16 | BEEN_A_WHILE=1 17 | touch "$PIDFILE" 18 | else 19 | BEEN_A_WHILE=0 20 | fi 21 | else 22 | EXISTS=0 23 | fi 24 | 25 | /sbin/start-stop-daemon --quiet --start --pidfile "$PIDFILE" --make-pidfile -d "$DIRECTORY" --startas "$EXECUTABLE" -- build -s 26 | 27 | if [ "$?" -ne 0 ] 28 | then 29 | if [ "$EXISTS" -eq 1 ] 30 | then 31 | if [ "$BEEN_A_WHILE" -eq 1 ] 32 | then 33 | echo "Still running after at least $A_WHILE; something wrong?" >&2 34 | exit 1 35 | fi 36 | else 37 | if [ ! -e "$PIDFILE" ] 38 | then 39 | echo "Starting failed" >&2 40 | exit 1 41 | fi 42 | fi 43 | fi 44 | 45 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/SignupConfirm.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Register a new account 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Register a new account

13 | 14 |

Email confirmation done! 15 | 16 |

Now you can set your password and create the account. 17 | 18 |

19 | 20 |
Your name:$realname$ 21 |
Login username:$username$ 22 |
Contact email address:$useremail$ 23 |
24 | 25 |
26 | 27 |
28 | 29 |

30 |

31 | 32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/AuthTypes.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TemplateHaskell #-} 2 | module Distribution.Server.Framework.AuthTypes where 3 | 4 | import Distribution.Server.Framework.MemSize 5 | 6 | import Data.SafeCopy (base, deriveSafeCopy) 7 | import Data.Typeable (Typeable) 8 | 9 | -- | A plain, unhashed password. Careful what you do with them. 10 | -- 11 | newtype PasswdPlain = PasswdPlain String 12 | deriving Eq 13 | 14 | -- | A password hash. It actually contains the hash of the username, passowrd 15 | -- and realm. 16 | -- 17 | -- Hashed passwords are stored in the format 18 | -- @md5 (username ++ ":" ++ realm ++ ":" ++ password)@. This format enables 19 | -- us to use either the basic or digest HTTP authentication methods. 20 | -- 21 | newtype PasswdHash = PasswdHash String 22 | deriving (Eq, Ord, Show, Typeable, MemSize) 23 | 24 | newtype RealmName = RealmName String 25 | deriving (Show, Eq) 26 | 27 | $(deriveSafeCopy 0 'base ''PasswdPlain) 28 | $(deriveSafeCopy 0 'base ''PasswdHash) 29 | 30 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Util.hs: -------------------------------------------------------------------------------- 1 | 2 | module Distribution.Server.Pages.Util 3 | ( hackageNotFound 4 | , hackageError 5 | 6 | , makeInput 7 | , makeCheckbox 8 | ) where 9 | 10 | import Distribution.Server.Pages.Template (hackagePage) 11 | 12 | import Text.XHtml.Strict 13 | 14 | hackageNotFound :: HTML a => a -> Html 15 | hackageNotFound contents 16 | = hackagePage "Not Found" [toHtml contents] 17 | 18 | hackageError :: HTML a => a -> Html 19 | hackageError contents 20 | = hackagePage "Error" [toHtml contents] 21 | 22 | makeInput :: [HtmlAttr] -> String -> String -> [Html] 23 | makeInput attrs fname labelName = [label ! [thefor fname] << labelName, 24 | input ! (attrs ++ [name fname, identifier fname])] 25 | 26 | makeCheckbox :: Bool -> String -> String -> String -> [Html] 27 | makeCheckbox isChecked fname fvalue labelName = [input ! ([thetype "checkbox", name fname, identifier fname, value fvalue] 28 | ++ if isChecked then [checked] else []), 29 | toHtml " ", 30 | label ! [thefor fname] << labelName] 31 | -------------------------------------------------------------------------------- /tests/Util.hs: -------------------------------------------------------------------------------- 1 | -- | Miscellaneous utility functions 2 | {-# LANGUAGE ViewPatterns #-} 3 | {-# OPTIONS_GHC -Wall #-} 4 | module Util ( 5 | explode 6 | , trim 7 | , die 8 | , info 9 | , decodeJSON 10 | ) where 11 | 12 | import Data.Char 13 | import Data.Aeson 14 | import System.IO 15 | import System.Exit 16 | 17 | import qualified Data.ByteString.Lazy.Char8 as LBS 18 | 19 | -- | > explode ',' "abc,def,ghi" == ["abc", "def", "ghi"] 20 | explode :: Eq a => a -> [a] -> [[a]] 21 | explode x (break (== x) -> (xs1, [])) = [xs1] 22 | explode x (break (== x) -> (xs1, (_ : xs'))) = xs1 : explode x xs' 23 | explode _ _ = fail "the impossible happened" 24 | 25 | -- | Remove leading and trailing whitespace 26 | trim :: String -> String 27 | trim = reverse . dropWhile isSpace 28 | . reverse . 
dropWhile isSpace 29 | 30 | info :: String -> IO () 31 | info str = putStrLn ("= " ++ str) 32 | 33 | die :: String -> IO a 34 | die err = do hPutStrLn stderr err 35 | exitFailure 36 | 37 | decodeJSON :: FromJSON a => String -> IO a 38 | decodeJSON str = 39 | case decode (LBS.pack str) of 40 | Nothing -> fail "Could not decode JSON" 41 | Just result -> return result 42 | -------------------------------------------------------------------------------- /datafiles/templates/Html/maintain-candidate.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Maintainers' page for $pkgname$-$pkgversion$ candidate 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Maintainers' page for $pkgname$-$pkgversion$ candidate

13 | 14 |

Here, you can delete a candidate, publish it, upload a new one, and 15 | edit the maintainer group. 16 | 17 |

18 |
Delete candidate
19 |
Discard this candidate (does not affect published packages). 20 |
21 | 22 |
Publish candidate
23 |
Publish this candidate to make it visible in the main package database. 24 |
25 | 26 |
Upload a new candidate
27 |
If you upload a new candidate with the same version as an 28 | existing candidate, the older will be overwritten. 29 |
30 | 31 |
Maintainer group
32 |
Only these users are allowed to upload new versions of the package. 33 | Existing members can add other users into the maintainer group. 34 |
35 | 36 |
37 | 38 |
39 | 40 | -------------------------------------------------------------------------------- /Distribution/Client/HtPasswdDb.hs: -------------------------------------------------------------------------------- 1 | -- | Parsing @.htpasswd@ files 2 | -- 3 | module Distribution.Client.HtPasswdDb ( 4 | HtPasswdDb, HtPasswdHash(..), 5 | parse, 6 | ) where 7 | 8 | import Distribution.Server.Users.Types (UserName(..)) 9 | 10 | type HtPasswdDb = [(UserName, Maybe HtPasswdHash)] 11 | 12 | newtype HtPasswdHash = HtPasswdHash String 13 | deriving (Eq, Show) 14 | 15 | parse :: String -> Either String HtPasswdDb 16 | parse = accum 0 [] . map parseLine . lines 17 | where 18 | accum _ pairs [] = Right (reverse pairs) 19 | accum n pairs (Just pair:rest) = accum (n+1) (pair:pairs) rest 20 | accum n _ (Nothing :_ ) = Left errmsg 21 | where errmsg = "parse error in htpasswd file on line " ++ show (n :: Int) 22 | 23 | parseLine :: String -> Maybe (UserName, Maybe HtPasswdHash) 24 | parseLine line = case break (==':') line of 25 | 26 | -- entries like "myName:$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/" 27 | -- this is a special Apache md5-based format that we do not handle 28 | (user@(_:_), ':' :'$':_) -> Just (UserName user, Nothing) 29 | 30 | -- entries like "myName:rqXexS6ZhobKA" 31 | (user@(_:_), ':' : hash) -> Just (UserName user, Just (HtPasswdHash hash)) 32 | _ -> Nothing 33 | -------------------------------------------------------------------------------- /Distribution/Server/Packages/ChangeLog.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE NamedFieldPuns #-} 2 | module Distribution.Server.Packages.ChangeLog ( 3 | findChangeLog 4 | ) where 5 | 6 | import Data.TarIndex (TarIndex, TarEntryOffset) 7 | import qualified Data.TarIndex as TarIndex 8 | import Distribution.Server.Packages.Types (PkgInfo) 9 | import Distribution.Package (packageId) 10 | import Distribution.Text (display) 11 | 12 | import System.FilePath ((), splitExtension) 13 | import Data.Char as Char 14 | import Data.Maybe 15 | 16 | 17 | findChangeLog :: PkgInfo -> TarIndex -> Maybe (TarEntryOffset, String) 18 | findChangeLog pkg index = do 19 | let topdir = display (packageId pkg) 20 | TarIndex.TarDir fnames <- TarIndex.lookup index topdir 21 | listToMaybe 22 | [ (offset, fname') 23 | | fname <- fnames 24 | , isChangelogFile fname 25 | , let fname' = topdir fname 26 | , Just (TarIndex.TarFileEntry offset) <- [TarIndex.lookup index fname'] ] 27 | where 28 | isChangelogFile fname = 29 | let (base, ext) = splitExtension fname 30 | in map Char.toLower base `elem` basenames 31 | && ext `elem` extensions 32 | 33 | basenames = ["changelog", "change_log", "changes"] 34 | extensions = ["", ".txt", ".md", ".markdown"] 35 | 36 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/ResetRequest.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Account recovery 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Account recovery for forgotten passwords

13 | 14 |

If you have forgotten the password you use to log in to Hackage, you can 15 | recover the account and set a new password. 16 | 17 |

Enter your account name and the email address that you originally 18 | signed up with. The system will send you an email with a link that you can use 19 | to set a new password. 20 | 21 |

22 | 23 | 24 | 25 | 29 |
26 | 27 | 28 |
30 | 31 | This must be the same email address that you registered with originally. 32 |
33 | 34 |

35 | 36 |

You will be sent an email containing a link to a page where you 37 | can set a new password. 38 |

39 | 40 |
41 | 42 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/Logging.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Framework.Logging ( 2 | Verbosity, 3 | lognotice, 4 | loginfo, 5 | logdebug, 6 | logTiming, 7 | ) where 8 | 9 | import Distribution.Verbosity 10 | import System.IO 11 | import qualified Data.ByteString.Char8 as BS -- No UTF8 in log messages 12 | import System.Environment 13 | import Control.Monad (when) 14 | import Data.Time.Clock (getCurrentTime, diffUTCTime) 15 | 16 | 17 | lognotice :: Verbosity -> String -> IO () 18 | lognotice verbosity msg = 19 | when (verbosity >= normal) $ do 20 | pname <- getProgName 21 | BS.hPutStrLn stdout (BS.pack $ pname ++ ": " ++ msg) 22 | hFlush stdout 23 | 24 | loginfo :: Verbosity -> String -> IO () 25 | loginfo verboisty msg = 26 | when (verboisty >= verbose) $ do 27 | BS.hPutStrLn stderr (BS.pack msg) 28 | hFlush stderr 29 | 30 | logdebug :: Verbosity -> String -> IO () 31 | logdebug verbosity msg = 32 | when (verbosity >= deafening) $ do 33 | BS.hPutStrLn stderr (BS.pack msg) 34 | hFlush stderr 35 | 36 | logTiming :: Verbosity -> String -> IO a -> IO a 37 | logTiming verbosity msg action = do 38 | t <- getCurrentTime 39 | res <- action 40 | t' <- getCurrentTime 41 | loginfo verbosity (msg ++ ". time: " ++ show (diffUTCTime t' t)) 42 | return res 43 | 44 | -------------------------------------------------------------------------------- /datafiles/templates/EditCabalFile/cabalFilePublished.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Published new revision for $pkgid$ 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Published new revision for $pkgid$

13 | 14 |

The new revision has been published. It will be available to users as soon 15 | as they update their package index (e.g. cabal update). 16 | 17 |

$cabalfile$
18 | 19 |

Changes in this revision

20 |
    21 | $changes:{change|
  • Changed $change.what$ 22 | from

    $change.from$
    23 | to
    $change.to$
  • }$ 24 |
25 | 26 |

Advice on adjusting version constraints

27 | 28 |

You can edit the version constraints for the dependencies, 29 | either to restrict or relax them. The goal in editing the constraints should 30 | always be to make them reflect reality. 31 |

    32 |
  • If the package fails to build against certain versions of a dependency 33 | then constrain the version. 34 |

  • If the package builds against and works correctly with a newer 35 | version of a dependency then it is ok to relax the constraint (see the example after this list). 36 |

      37 | 38 |
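To make the advice above concrete: a version constraint is just a predicate on versions, and the Cabal library (already used throughout this code base, e.g. Distribution.Text in Distribution.Client.DistroMap) can check a candidate version against an edited range. A minimal sketch, not taken from this repository; the helper name satisfiesRange is invented for illustration:

```haskell
-- Illustrative only: check whether a concrete dependency version satisfies a
-- (possibly revised) version range, using the Cabal library API.
import Distribution.Text (simpleParse)
import Distribution.Version (Version, VersionRange, withinRange)

satisfiesRange :: String -> String -> Maybe Bool
satisfiesRange versionStr rangeStr = do
    version <- simpleParse versionStr :: Maybe Version
    range   <- simpleParse rangeStr   :: Maybe VersionRange
    return (version `withinRange` range)

-- satisfiesRange "0.10.4.0" ">=0.9 && <0.11"  ==  Just True
-- satisfiesRange "0.11.0.0" ">=0.9 && <0.11"  ==  Just False
```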
39 | 40 | 41 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/MemState.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Framework.MemState ( 2 | MemState, 3 | newMemStateNF, 4 | newMemStateWHNF, 5 | readMemState, 6 | writeMemState, 7 | modifyMemState, 8 | ) where 9 | 10 | import Control.Concurrent.MVar 11 | import Control.Exception (evaluate) 12 | import Control.Monad.Trans (MonadIO(liftIO)) 13 | import Control.DeepSeq (NFData, rnf) 14 | 15 | -- | General-purpose in-memory ephemeral state. 16 | data MemState a = MemState !(MVar a) (a -> ()) 17 | 18 | newMemStateWHNF :: a -> IO (MemState a) 19 | newMemStateWHNF state = do 20 | var <- newMVar state 21 | return (MemState var (\x -> seq x ())) 22 | 23 | newMemStateNF :: NFData a => a -> IO (MemState a) 24 | newMemStateNF state = do 25 | var <- newMVar state 26 | return (MemState var rnf) 27 | 28 | readMemState :: MonadIO m => MemState a -> m a 29 | readMemState (MemState var _) = liftIO $ readMVar var 30 | 31 | writeMemState :: MonadIO m => MemState a -> a -> m () 32 | writeMemState (MemState var force) x = 33 | liftIO $ modifyMVar_ var $ \_ -> do 34 | evaluate (force x) 35 | return x 36 | 37 | modifyMemState :: MonadIO m => MemState a -> (a -> a) -> m () 38 | modifyMemState (MemState var force) f = 39 | liftIO $ modifyMVar_ var $ \x -> do 40 | let x' = f x 41 | evaluate (force x') 42 | return x' 43 | 44 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Crash.hs: -------------------------------------------------------------------------------- 1 | -- | This is strictly for debugging only. Throws various kinds of exceptions. 2 | module Distribution.Server.Features.Crash (serverCrashFeature) where 3 | 4 | import Distribution.Server.Framework 5 | 6 | import Data.Maybe 7 | import Control.Exception 8 | import Control.Concurrent 9 | 10 | serverCrashFeature :: HackageFeature 11 | serverCrashFeature = (emptyHackageFeature "crash") { 12 | featureDesc = "Throw various kinds of exceptions (for debugging purposes)" 13 | , featureResources = [ 14 | (resourceAt "/crash/throw/:userError/:delay") { 15 | resourceDesc = [ (GET, "Throw a user error") ] 16 | , resourceGet = [ ("", throwUserError) ] 17 | } 18 | ] 19 | , featureState = [] 20 | } 21 | 22 | throwUserError :: DynamicPath -> ServerPartE Response 23 | throwUserError dpath = liftIO $ do 24 | let ex :: IOError 25 | ex = userError $ fromJust (lookup "userError" dpath) 26 | 27 | delay :: Int 28 | delay = read $ fromJust (lookup "delay" dpath) 29 | 30 | if delay == 0 31 | then throwIO ex 32 | else do tid <- myThreadId 33 | void . forkIO $ do threadDelay delay 34 | putStrLn "Throwing exception.." 35 | throwTo tid ex 36 | return . 
toResponse $ "Throwing exception in " ++ show delay ++ " microseconds" 37 | -------------------------------------------------------------------------------- /Distribution/Client/PkgIndex.hs: -------------------------------------------------------------------------------- 1 | ----------------------------------------------------------------------------- 2 | -- | 3 | -- Module : Distribution.Client.PkgIndex 4 | -- Copyright : (c) Duncan Coutts 2012 5 | -- License : BSD-like 6 | -- 7 | -- Maintainer : duncan@community.haskell.org 8 | -- Stability : provisional 9 | -- Portability : portable 10 | -- 11 | -- Support for importing cabal files from a 00-index.tar.gz file 12 | ----------------------------------------------------------------------------- 13 | module Distribution.Client.PkgIndex ( 14 | readPkgIndex 15 | ) where 16 | 17 | import qualified Distribution.Server.Util.Index as PackageIndex (read) 18 | import qualified Codec.Archive.Tar.Entry as Tar (Entry(..), EntryContent(..)) 19 | 20 | import Distribution.Package 21 | 22 | import Data.ByteString.Lazy (ByteString) 23 | import qualified Distribution.Server.Util.GZip as GZip 24 | 25 | import Prelude hiding (read) 26 | 27 | 28 | readPkgIndex :: ByteString -> Either String [(PackageIdentifier, ByteString)] 29 | readPkgIndex = fmap extractCabalFiles 30 | . PackageIndex.read (,) 31 | . GZip.decompressNamed "<>" 32 | where 33 | extractCabalFiles entries = 34 | [ (pkgid, cabalFile) 35 | | (pkgid, Tar.Entry { 36 | Tar.entryContent = Tar.NormalFile cabalFile _ 37 | }) <- entries ] 38 | 39 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/Hook.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Framework.Hook ( 2 | Hook, 3 | newHook, 4 | registerHook, 5 | registerHookJust, 6 | 7 | runHook, 8 | runHook_, 9 | ) where 10 | 11 | import Data.IORef 12 | import Control.Monad.Trans (MonadIO, liftIO) 13 | 14 | -- | A list of actions accociated with an event. 15 | -- 16 | newtype Hook a b = Hook (IORef [a -> IO b]) 17 | 18 | newHook :: IO (Hook a b) 19 | newHook = fmap Hook $ newIORef [] 20 | 21 | -- registers a hook to be run *before* all of the previously registered hooks. 22 | -- is this the best strategy? relying on ordering rules of any kind can introduce 23 | -- nasty bugs. 
24 | registerHook :: Hook a b -> (a -> IO b) -> IO () 25 | registerHook (Hook ref) action = 26 | atomicModifyIORef ref (\actions -> (action:actions, ())) 27 | 28 | registerHookJust :: Hook a () -> (a -> Maybe b) -> (b -> IO ()) -> IO () 29 | registerHookJust (Hook ref) predicate action = 30 | atomicModifyIORef ref (\actions -> (action':actions, ())) 31 | where 32 | action' x = maybe (return ()) action (predicate x) 33 | 34 | runHook :: MonadIO m => Hook a b -> a -> m [b] 35 | runHook (Hook ref) x = liftIO $ do 36 | actions <- readIORef ref 37 | sequence [ action x | action <- actions ] 38 | 39 | runHook_ :: MonadIO m => Hook a () -> a -> m () 40 | runHook_ (Hook ref) x = liftIO $ do 41 | actions <- readIORef ref 42 | sequence_ [ action x | action <- actions ] 43 | 44 | -------------------------------------------------------------------------------- /Distribution/Server/Util/Parse.hs: -------------------------------------------------------------------------------- 1 | -- | Parsing and UTF8 utilities 2 | module Distribution.Server.Util.Parse ( 3 | int, unpackUTF8, packUTF8 4 | ) where 5 | 6 | import qualified Distribution.Compat.ReadP as Parse 7 | 8 | import qualified Data.Char as Char 9 | import Data.ByteString.Lazy (ByteString) 10 | import qualified Data.Text.Lazy as Text 11 | import qualified Data.Text.Lazy.Encoding as Text 12 | import qualified Data.Text.Encoding.Error as Text 13 | 14 | -- | Parse a positive integer. No leading @0@'s allowed. 15 | -- 16 | int :: Parse.ReadP r Int 17 | int = do 18 | first <- Parse.satisfy Char.isDigit 19 | if first == '0' 20 | then return 0 21 | else do rest <- Parse.munch Char.isDigit 22 | return (read (first : rest)) 23 | 24 | -- | Ignore a Unicode byte order mark (BOM) at the beginning of the input 25 | -- 26 | -- (Also in Distribution.Simple.Utils, but not exported) 27 | ignoreBOM :: String -> String 28 | ignoreBOM ('\xFEFF':string) = string 29 | ignoreBOM string = string 30 | 31 | unpackUTF8 :: ByteString -> String 32 | unpackUTF8 = ignoreBOM . Text.unpack . Text.decodeUtf8With Text.lenientDecode 33 | 34 | packUTF8 :: String -> ByteString 35 | packUTF8 = Text.encodeUtf8 . Text.pack 36 | -------------------------------------------------------------------------------- /datafiles/templates/LegacyPasswds/htpasswd-upgrade-success.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Account upgrade successful 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Account upgrade successful

13 |

Your account has been re-enabled. 14 |

15 | 16 |

Technical details for the curious

17 | 18 |

The old hackage implementation used HTTP basic authentication. The new system uses HTTP digest authentication. 19 | 20 |

We could not transparently upgrade accounts to the new system because 21 | the password hash format is different for the new system. The old 22 | format was the 23 | 24 | Apache basic auth 'CRYPT' format, while the new format is 25 | equivalent to the 26 | 27 | Apache digest authentication format. It is not possible to generate 28 | the new format without access to the plaintext password – which 29 | was never stored. So by authenticating once using the old account 30 | information – using HTTP basic authentication – we can 31 | generate and store password digest for the new system. 32 | 33 |
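The digest described here is the same format that Distribution.Server.Framework.AuthTypes documents for PasswdHash, namely md5 (username ++ ":" ++ realm ++ ":" ++ password). A minimal sketch of computing such a hash (not the server's actual code; it assumes the pureMD5 package purely for illustration, and the helper name digestAuthHash is invented):

```haskell
-- Illustration only: compute an md5 (username ++ ":" ++ realm ++ ":" ++ password)
-- digest of the kind described above, using the pureMD5 package.
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Digest.Pure.MD5 (md5)

digestAuthHash :: String -> String -> String -> String
digestAuthHash username realm password =
    show (md5 (LBS.pack (username ++ ":" ++ realm ++ ":" ++ password)))

-- e.g. digestAuthHash "alice" "Hackage" "secret"
-- yields a 32-character hex string; the plaintext password itself is never stored.
```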

34 | 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2008, Duncan Coutts and David Himmelstrup. 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | - Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | - Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | - The names of the contributors may not be used to endorse or promote 15 | products derived from this software without specific prior written 16 | permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY "AS IS" AND ANY EXPRESS OR IMPLIED 19 | WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 20 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN 21 | NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 22 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 23 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF 24 | USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /Distribution/Server/Packages/ModuleForest.hs: -------------------------------------------------------------------------------- 1 | -- (C) Copyright by Bas van Dijk, v.dijk.bas@gmail.com, 2008 2 | -- Inspiration (read: copied, renamed and simplified) from: 3 | -- http://code.haskell.org/haddock/src/Haddock/ModuleTree.hs 4 | 5 | module Distribution.Server.Packages.ModuleForest ( ModuleForest, ModuleTree(..), moduleForest ) where 6 | 7 | import Distribution.ModuleName ( ModuleName, components ) 8 | 9 | -------------------------------------------------------------------------------- 10 | 11 | type ModuleForest = [ModuleTree] 12 | 13 | data ModuleTree = Node String -- Part of module name 14 | Bool -- Is this an existing module? 15 | ModuleForest -- Sub modules 16 | deriving (Show, Eq) 17 | 18 | -------------------------------------------------------------------------------- 19 | 20 | moduleForest :: [ModuleName] -> ModuleForest 21 | moduleForest = foldr (addToForest . 
components) [] 22 | 23 | addToForest :: [String] -> ModuleForest -> ModuleForest 24 | addToForest [] ts = ts 25 | addToForest ss [] = mkSubTree ss 26 | addToForest s1ss@(s1:ss) (t@(Node s2 isModule subs) : ts) 27 | | s1 > s2 = t : addToForest s1ss ts 28 | | s1 == s2 = Node s2 (isModule || null ss) (addToForest ss subs) : ts 29 | | otherwise = mkSubTree s1ss ++ t : ts 30 | 31 | mkSubTree :: [String] -> ModuleForest 32 | mkSubTree [] = [] 33 | mkSubTree (s:ss) = [Node s (null ss) (mkSubTree ss)] 34 | 35 | -------------------------------------------------------------------------------- 36 | 37 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Package/HaddockTypes.hs: -------------------------------------------------------------------------------- 1 | -- stolen from Haddock's Types.hs 2 | module Distribution.Server.Pages.Package.HaddockTypes where 3 | 4 | data Doc id 5 | = DocEmpty 6 | | DocAppend (Doc id) (Doc id) 7 | | DocString String 8 | | DocParagraph (Doc id) 9 | | DocIdentifier id 10 | | DocModule String 11 | | DocEmphasis (Doc id) 12 | | DocMonospaced (Doc id) 13 | | DocUnorderedList [Doc id] 14 | | DocOrderedList [Doc id] 15 | | DocDefList [(Doc id, Doc id)] 16 | | DocCodeBlock (Doc id) 17 | | DocHyperlink Hyperlink 18 | | DocPic String 19 | | DocAName String 20 | 21 | data Hyperlink = Hyperlink 22 | { hyperlinkUrl :: String 23 | , hyperlinkLabel :: Maybe String 24 | } deriving (Eq, Show) 25 | 26 | -- | DocMarkup is a set of instructions for marking up documentation. 27 | -- In fact, it's really just a mapping from 'Doc' to some other 28 | -- type [a], where [a] is usually the type of the output (HTML, say). 29 | 30 | data DocMarkup id a = Markup { 31 | markupEmpty :: a, 32 | markupString :: String -> a, 33 | markupParagraph :: a -> a, 34 | markupAppend :: a -> a -> a, 35 | markupIdentifier :: id -> a, 36 | markupModule :: String -> a, 37 | markupEmphasis :: a -> a, 38 | markupMonospaced :: a -> a, 39 | markupUnorderedList :: [a] -> a, 40 | markupOrderedList :: [a] -> a, 41 | markupDefList :: [(a,a)] -> a, 42 | markupCodeBlock :: a -> a, 43 | markupHyperlink :: Hyperlink -> a, 44 | markupAName :: String -> a, 45 | markupPic :: String -> a 46 | } 47 | 48 | type RdrName = String 49 | -------------------------------------------------------------------------------- /old-hackage-download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Script to get all the old hackage data 4 | 5 | IMPORTDATA_DIR=./import-data 6 | IMPORTDATA_DIR=$(realpath ${IMPORTDATA_DIR}) 7 | 8 | mkdir -p ${IMPORTDATA_DIR}/passwd/ ${IMPORTDATA_DIR}/archive/ ${IMPORTDATA_DIR}/docs/ 9 | 10 | echo "Syncing passwd files..." 11 | rsync -r -v hackage.haskell.org:/srv/www/hackage.haskell.org/passwd/ ${IMPORTDATA_DIR}/passwd/ 12 | 13 | echo "Syncing archive..." 14 | rsync -r -v -z --skip-compress=gz -f'- latest/' -f'- logs/' hackage.haskell.org:/srv/www/hackage.haskell.org/public_html/packages/archive/ ${IMPORTDATA_DIR}/archive/ 15 | 16 | echo "Syncing apache access logs..." 17 | rsync -r -v -z --skip-compress=gz -f'- error.log*' hackage.haskell.org:/var/log/apache2/ ${IMPORTDATA_DIR}/download-logs/ 18 | 19 | 20 | echo "Updating doc tarballs..." 
21 | pushd ${IMPORTDATA_DIR}/docs 22 | for docdir in ${IMPORTDATA_DIR}/archive/*/*/doc/ 23 | do 24 | pkgverdir=$(dirname $docdir) 25 | pkgver=$(basename $pkgverdir) 26 | pkgname=$(basename $(dirname $pkgverdir)) 27 | docdir_new=${pkgname}-${pkgver}-docs 28 | tarball=${docdir_new}.tar.gz 29 | 30 | if [[ -d ${docdir}/html ]]; then 31 | true 32 | if ! [[ -h ${docdir_new} ]]; then 33 | ln -f -s ${docdir}/html ${docdir_new} 34 | fi 35 | if ! [[ -f ${tarball} ]]; then 36 | echo ${tarball} 37 | tar -h -c ${docdir_new} -zf ${tarball} --exclude=frames.html --exclude=mini_* --owner=nobody --group=users --format=ustar 38 | else 39 | echo -n '.' 40 | fi 41 | else 42 | echo !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 43 | echo ${docdir}/html does not exist 44 | fi 45 | done 46 | popd 47 | -------------------------------------------------------------------------------- /datafiles/templates/Html/maintain.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Maintainers' page for $pkgname$ 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Maintainers' page for $pkgname$

13 | 14 |

 Package maintainers (and Hackage trustees) can edit a few things about the 15 | package after it's been released. 16 | 17 |

18 |
Package tags
19 |
 Package tags are used to improve search results and relate packages to 20 | each other. 21 |
22 | 23 |
Preferred versions
24 |
If you want users to keep using an older version when you release a newer 25 | version then you can set a preferred version range and tools like cabal 26 | will take it into account. 27 | You can also use this mechanism to deprecate individual versions (e.g. if 28 | you know they are broken) without deprecating the whole package. 29 |
30 | 31 |
Deprecation
32 |
You can deprecate the whole package (optionally in favour of some other 33 | package). 34 |
35 | 36 |
Maintainer group
37 |
Only these users are allowed to upload new versions of the package. 38 | Existing members can add other users into the maintainer group. 39 |
40 | 41 |
Cabal file metadata
42 |
You can edit certain bits of package metadata after a release, without uploading a new tarball. 43 |

$versions:{pkgid|$pkgid$}; separator=", "$

44 |
45 | 46 |
47 | 48 | -------------------------------------------------------------------------------- /Distribution/Server/Util/SigTerm.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | #if !(MIN_VERSION_base(4,6,0)) 3 | {-# LANGUAGE MagicHash, UnboxedTuples #-} 4 | #endif 5 | 6 | module Distribution.Server.Util.SigTerm (onSigTermCleanShutdown) where 7 | 8 | import System.Posix.Signals 9 | ( installHandler 10 | , Handler(Catch) 11 | , softwareTermination 12 | ) 13 | import Control.Exception 14 | ( AsyncException(UserInterrupt), throwTo ) 15 | import Control.Concurrent 16 | ( myThreadId ) 17 | #if MIN_VERSION_base(4,6,0) 18 | import Control.Concurrent 19 | ( ThreadId, mkWeakThreadId ) 20 | import System.Mem.Weak 21 | ( Weak ) 22 | #else 23 | import GHC.Conc.Sync 24 | ( ThreadId(..) ) 25 | import GHC.Weak 26 | ( Weak(..) ) 27 | import GHC.IO 28 | ( IO(IO) ) 29 | import GHC.Exts 30 | ( mkWeak#, unsafeCoerce# ) 31 | #endif 32 | import System.Mem.Weak 33 | ( deRefWeak ) 34 | 35 | -- | On SIGTERM, throw 'UserInterrupt' to the calling thread. 36 | -- 37 | onSigTermCleanShutdown :: IO () 38 | onSigTermCleanShutdown = do 39 | wtid <- mkWeakThreadId =<< myThreadId 40 | _ <- installHandler softwareTermination 41 | (Catch (cleanShutdownHandler wtid)) 42 | Nothing 43 | return () 44 | where 45 | cleanShutdownHandler :: Weak ThreadId -> IO () 46 | cleanShutdownHandler wtid = do 47 | mtid <- deRefWeak wtid 48 | case mtid of 49 | Nothing -> return () 50 | Just tid -> throwTo tid UserInterrupt 51 | 52 | #if !(MIN_VERSION_base(4,6,0)) 53 | mkWeakThreadId :: ThreadId -> IO (Weak ThreadId) 54 | mkWeakThreadId t@(ThreadId t#) = IO $ \s -> 55 | case mkWeak# t# t (unsafeCoerce# 0#) s of 56 | (# s1, w #) -> (# s1, Weak w #) 57 | #endif 58 | 59 | -------------------------------------------------------------------------------- /datafiles/templates/UserSignupReset/SignupRequest.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Register a new account 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Register a new account

13 | 14 |

Certain actions on this website require you to have an account. 15 | In particular you need an account to be able to upload or help maintain packages. 16 | 17 |

Using the form below you can register an account. 18 | 19 |

20 | 21 | 22 | 23 | 28 | 33 |
24 | 25 | This is what will be displayed on the site, e.g. Jan Novák 26 | 27 |
29 | 30 | This has to be ASCII with no spaces or symbols (except '_'), e.g. JanNovak 31 | 32 |
34 | 35 | e.g. jnovak@example.com (but do not use the style "Jan Novák" <jnovak@example.com>) 36 |
37 | 38 |

Your email address will be used to confirm your account (and if you ever 39 | need to reset your password). It will also be used if one of the site 40 | administrators ever needs to contact you. It will not be displayed on 41 | the website (but note that email addresses in .cabal files that you 42 | upload are public). 43 | 44 |

45 | 46 |

You will be sent an email containing a link to a page where you 47 | can set your password and activate your account. 48 |

49 | 50 |
51 | 52 | -------------------------------------------------------------------------------- /Distribution/Client/DistroMap.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards #-} 2 | ----------------------------------------------------------------------------- 3 | -- | 4 | -- Module : Distribution.Client.DistroMap 5 | -- Copyright : (c) Duncan Coutts 2012 6 | -- License : BSD-like 7 | -- 8 | -- Maintainer : duncan@community.haskell.org 9 | -- 10 | -- Support for reading the distromap files of the old hackage server. 11 | ----------------------------------------------------------------------------- 12 | module Distribution.Client.DistroMap ( 13 | Entry(..), 14 | read, 15 | toCSV, 16 | ) where 17 | 18 | import Distribution.Package 19 | ( PackageName ) 20 | import Distribution.Version 21 | ( Version ) 22 | import Distribution.Text 23 | ( display, simpleParse ) 24 | 25 | import Text.CSV 26 | ( CSV ) 27 | import Network.URI 28 | ( URI, parseURI ) 29 | import Data.Either 30 | ( partitionEithers ) 31 | 32 | import Prelude hiding (read) 33 | 34 | data Entry = Entry PackageName Version (Maybe URI) 35 | deriving (Eq, Show) 36 | 37 | -- | Returns a list of log entries. 38 | -- 39 | read :: String -> ([String], [Entry]) 40 | read = partitionEithers . map parseLine . lines 41 | where 42 | parseLine line 43 | | [((pkgnamestr, pkgverstr, murlstr),_)] <- reads line 44 | , Just pkgname <- simpleParse pkgnamestr 45 | , Just pkgver <- simpleParse pkgverstr 46 | , Just murl <- maybe (Just Nothing) (fmap Just . parseURI) murlstr 47 | = Right (Entry pkgname pkgver murl) 48 | 49 | | otherwise 50 | = Left err 51 | where 52 | err = "Failed to parse distro map line:\n" ++ show line 53 | 54 | toCSV :: [Entry] -> CSV 55 | toCSV = map $ \(Entry pkgname pkgver murl) -> 56 | [display pkgname, display pkgver, maybe "" show murl] 57 | 58 | -------------------------------------------------------------------------------- /Distribution/Server/Util/TarIndex.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell, DeriveDataTypeable, TypeFamilies, TemplateHaskell, 2 | MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-} 3 | 4 | -- This is presently unused: features provide their own BlobId-to-TarIndex 5 | -- mappings. 
6 | 7 | module Distribution.Server.Util.TarIndex 8 | where 9 | 10 | import Control.Applicative ((<$>)) 11 | import Control.Monad.Reader.Class (asks) 12 | import Control.Monad.State.Class (put, modify) 13 | import qualified Data.Map as Map 14 | 15 | import Data.Acid (makeAcidic) 16 | import Data.SafeCopy (base, deriveSafeCopy) 17 | import Data.TarIndex (TarIndex) 18 | 19 | import Distribution.Server.Framework.BlobStorage (BlobId) 20 | 21 | data TarIndexMap = M {indexMap :: Map.Map BlobId TarIndex} 22 | deriving (Typeable, Show) 23 | 24 | addIndex :: BlobId -> TarIndex -> Update TarIndexMap () 25 | addIndex blob index = modify $ insertTarIndex blob index 26 | 27 | insertTarIndex :: BlobId -> TarIndex -> TarIndexMap -> TarIndexMap 28 | insertTarIndex blob index (M state) = M (Map.insert blob index state) 29 | 30 | dropIndex :: BlobId -> Update TarIndexMap () 31 | dropIndex blob = modify $ \(M state) -> M (Map.delete blob state) 32 | 33 | lookupIndex :: BlobId -> Query TarIndexMap (Maybe TarIndex) 34 | lookupIndex blob = Map.lookup blob <$> asks indexMap 35 | 36 | replaceTarIndexMap :: TarIndexMap -> Update TarIndexMap () 37 | replaceTarIndexMap = put 38 | 39 | $(deriveSafeCopy 0 'base ''TarIndexMap) 40 | 41 | initialTarIndexMap :: TarIndexMap 42 | initialTarIndexMap = emptyTarIndex 43 | 44 | emptyTarIndex :: TarIndexMap 45 | emptyTarIndex = M Map.empty 46 | 47 | 48 | $(makeAcidic ''TarIndexMap 49 | [ 'addIndex 50 | , 'dropIndex 51 | , 'lookupIndex 52 | , 'replaceTarIndexMap 53 | ] 54 | ) 55 | -------------------------------------------------------------------------------- /Distribution/Server/Features/PackageCandidates/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TemplateHaskell #-} 2 | ----------------------------------------------------------------------------- 3 | -- | 4 | -- Module : Distribution.Server.Features.Check.Types 5 | -- Copyright : (c) Matthew Gruen 2010 6 | -- License : BSD-like 7 | -- 8 | -- Data types for the candidate feature 9 | ----------------------------------------------------------------------------- 10 | module Distribution.Server.Features.PackageCandidates.Types where 11 | 12 | import Distribution.Server.Packages.Types (PkgInfo(..)) 13 | import Distribution.Server.Framework.Instances () 14 | import Distribution.Server.Framework.MemSize 15 | 16 | import Distribution.Package 17 | ( PackageIdentifier(..), Package(..) ) 18 | 19 | import Data.Typeable (Typeable) 20 | import Data.SafeCopy 21 | 22 | 23 | ------------------------------------------------------ 24 | -- | The information we keep about a candidate package. 25 | -- 26 | -- It's currently possible to have candidates for packages which don't exist yet. 27 | -- 28 | data CandPkgInfo = CandPkgInfo { 29 | -- there should be one ByteString and one BlobId per candidate. 30 | -- this was enforced in the types.. but it's easier to just 31 | -- reuse PkgInfo for the task. 32 | candPkgInfo :: !PkgInfo, 33 | -- | Warnings to display at the top of the package page. 34 | candWarnings :: ![String], 35 | -- | Whether to allow non-maintainers to view the page or not. 36 | candPublic :: !Bool 37 | } deriving (Show, Typeable, Eq) 38 | 39 | candInfoId :: CandPkgInfo -> PackageIdentifier 40 | candInfoId = pkgInfoId . 
candPkgInfo 41 | 42 | deriveSafeCopy 0 'base ''CandPkgInfo 43 | 44 | instance Package CandPkgInfo where packageId = candInfoId 45 | 46 | instance MemSize CandPkgInfo where 47 | memSize (CandPkgInfo a b c) = memSize3 a b c 48 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Tags/Backup.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Features.Tags.Backup ( 2 | tagsBackup, 3 | tagsToCSV, 4 | tagsToRecord 5 | ) where 6 | 7 | import Distribution.Server.Features.Tags.State 8 | import Distribution.Server.Framework.BackupRestore 9 | 10 | import Distribution.Package 11 | import Distribution.Text (display) 12 | 13 | import Text.CSV (CSV, Record) 14 | import qualified Data.Map as Map 15 | -- import Data.Set (Set) 16 | import qualified Data.Set as Set 17 | 18 | tagsBackup :: RestoreBackup PackageTags 19 | tagsBackup = updateTags emptyPackageTags 20 | 21 | updateTags :: PackageTags -> RestoreBackup PackageTags 22 | updateTags tagsState = RestoreBackup { 23 | restoreEntry = \(BackupByteString entry bs) -> 24 | if entry == ["tags.csv"] 25 | then do csv <- importCSV "tags.csv" bs 26 | tagsState' <- updateFromCSV csv tagsState 27 | return (updateTags tagsState') 28 | else return (updateTags tagsState) 29 | , restoreFinalize = return tagsState 30 | } 31 | 32 | updateFromCSV :: CSV -> PackageTags -> Restore PackageTags 33 | updateFromCSV = concatM . map fromRecord 34 | where 35 | fromRecord :: Record -> PackageTags -> Restore PackageTags 36 | fromRecord (packageField:tagFields) tagsState | not (null tagFields) = do 37 | pkgname <- parseText "package name" packageField 38 | tags <- mapM (parseText "tag") tagFields 39 | return (setTags pkgname (Set.fromList tags) tagsState) 40 | fromRecord x _ = fail $ "Invalid tags record: " ++ show x 41 | 42 | ------------------------------------------------------------------------------ 43 | tagsToCSV :: PackageTags -> CSV 44 | tagsToCSV = map (\(p, t) -> tagsToRecord p $ Set.toList t) 45 | . Map.toList . packageTags 46 | 47 | tagsToRecord :: PackageName -> [Tag] -> Record -- [String] 48 | tagsToRecord pkgname tags = display pkgname:map display tags 49 | 50 | -------------------------------------------------------------------------------- /Distribution/Server/Framework.hs: -------------------------------------------------------------------------------- 1 | -- | Re-export the common parts of the server framework. 
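For concreteness, the tags.csv format handled by the Tags backup module above is one record per package: the package name first, followed by each tag as a further field. A hypothetical fragment:

  lens,data,lenses
  aeson,json,parsing

tagsToRecord produces exactly this shape (display pkgname : map display tags), and updateFromCSV rejects any record that names a package but carries no tag fields.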
2 | -- 3 | module Distribution.Server.Framework ( 4 | 5 | module Happstack.Server, 6 | module Data.Acid, 7 | module Distribution.Server.Framework.MemState, 8 | module Distribution.Server.Framework.Cache, 9 | module Distribution.Server.Framework.MemSize, 10 | 11 | module Distribution.Server.Framework.Auth, 12 | module Distribution.Server.Framework.Feature, 13 | module Distribution.Server.Framework.ServerEnv, 14 | module Distribution.Server.Framework.Resource, 15 | module Distribution.Server.Framework.RequestContentTypes, 16 | module Distribution.Server.Framework.ResponseContentTypes, 17 | module Distribution.Server.Framework.Hook, 18 | module Distribution.Server.Framework.Error, 19 | module Distribution.Server.Framework.Logging, 20 | module Distribution.Server.Util.Happstack, 21 | 22 | module Data.Monoid, 23 | module Control.Applicative, 24 | module Control.Monad, 25 | module Control.Monad.Trans, 26 | module System.FilePath, 27 | 28 | ) where 29 | 30 | import Happstack.Server 31 | 32 | import Data.Acid 33 | import Distribution.Server.Framework.MemState 34 | import Distribution.Server.Framework.Cache 35 | import Distribution.Server.Framework.MemSize 36 | 37 | import Distribution.Server.Framework.Auth (PrivilegeCondition(..)) 38 | import Distribution.Server.Framework.Feature 39 | import Distribution.Server.Framework.ServerEnv 40 | import Distribution.Server.Framework.Resource 41 | import Distribution.Server.Framework.RequestContentTypes 42 | import Distribution.Server.Framework.ResponseContentTypes 43 | import Distribution.Server.Framework.Hook 44 | import Distribution.Server.Framework.Error 45 | import Distribution.Server.Framework.Logging 46 | 47 | import Distribution.Server.Util.Happstack 48 | 49 | 50 | import Data.Monoid (Monoid(..)) 51 | import Control.Applicative (Applicative(..), (<$>)) 52 | import Control.Monad 53 | import Control.Monad.Trans (MonadIO, liftIO) 54 | import System.FilePath ((), (<.>)) 55 | -------------------------------------------------------------------------------- /Distribution/Server/Util/Happstack.hs: -------------------------------------------------------------------------------- 1 | 2 | {-| 3 | 4 | Functions and combinators to expose functioanlity buiding 5 | on happstack bit is not really specific to any one area 6 | of Hackage. 7 | 8 | -} 9 | 10 | module Distribution.Server.Util.Happstack ( 11 | remainingPath, 12 | remainingPathString, 13 | mime, 14 | consumeRequestBody, 15 | 16 | uriEscape 17 | ) where 18 | 19 | import Happstack.Server 20 | import qualified Data.Map as Map 21 | import System.FilePath.Posix (takeExtension, ()) 22 | import Control.Monad 23 | import qualified Data.ByteString.Lazy as BS 24 | import qualified Network.URI as URI 25 | 26 | 27 | -- |Passes a list of remaining path segments in the URL. Does not 28 | -- include the query string. This call only fails if the passed in 29 | -- handler fails. 30 | remainingPath :: Monad m => ([String] -> ServerPartT m a) -> ServerPartT m a 31 | remainingPath handle = do 32 | rq <- askRq 33 | localRq (\newRq -> newRq{rqPaths=[]}) $ handle (rqPaths rq) 34 | 35 | -- | Gets the string without altering the request. 36 | remainingPathString :: Monad m => ServerPartT m String 37 | remainingPathString = do 38 | strs <- liftM rqPaths askRq 39 | return $ if null strs then "" else foldr1 () . 
map uriEscape $ strs 40 | 41 | -- This disappeared from happstack in 7.1.7 42 | uriEscape :: String -> String 43 | uriEscape = URI.escapeURIString URI.isAllowedInURI 44 | 45 | -- |Returns a mime-type string based on the extension of the passed in 46 | -- file. 47 | mime :: FilePath -> String 48 | mime x = Map.findWithDefault "text/plain" (drop 1 (takeExtension x)) mimeTypes 49 | 50 | 51 | -- | Get the raw body of a PUT or POST request. 52 | -- 53 | -- Note that for performance reasons, this consumes the data and it cannot be 54 | -- called twice. 55 | -- 56 | consumeRequestBody :: Happstack m => m BS.ByteString 57 | consumeRequestBody = do 58 | mRq <- takeRequestBody =<< askRq 59 | case mRq of 60 | Nothing -> escape $ internalServerError $ toResponse 61 | "consumeRequestBody cannot be called more than once." 62 | Just (Body b) -> return b 63 | -------------------------------------------------------------------------------- /datafiles/templates/LegacyPasswds/htpasswd-upgrade.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Account upgrade 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Users moving from the old Hackage

13 |

This new Hackage implementation uses a somewhat more secure system 14 | for logging in. Because of this change, users who had accounts on the 15 | old system need to do a one-time upgrade step. 16 |

17 | 18 |
19 |

You will be prompted to enter your existing username and password. 20 | Your account will be re-enabled and you will then be able to use the 21 | new site normally. 22 |

23 | 24 |
25 | 26 |

Note that if the upgrade is successful then the old auth 27 | information will be deleted and trying to upgrade again will fail. 28 |

29 | 30 | 31 |

Technical details for the curious

32 | 33 |

The old hackage implementation used HTTP basic authentication. The new system uses HTTP digest authentication. 34 | 35 |

We could not transparently upgrade accounts to the new system because 36 | the password hash format is different in the new system. The old 37 | format was the 38 | 39 | Apache basic auth 'CRYPT' format, while the new format is 40 | equivalent to the 41 | 42 | Apache digest authentication format. It is not possible to generate 43 | the new format without access to the plaintext password – which 44 | was never stored. So by authenticating once using the old account 45 | information – using HTTP basic authentication – we can 46 | generate and store a password digest for the new system. 47 | 48 |
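To make the format difference concrete: the stored hash in the new scheme is the MD5 of "username:realm:password", as computed by newPasswdHash in Distribution.Server.Framework.AuthCrypt (included further down in this tree). A minimal sketch of the same computation, with hypothetical credentials:

  import Data.Digest.Pure.MD5 (md5)
  import qualified Data.ByteString.Lazy.Char8 as BS
  import Data.List (intercalate)

  -- Hex-encoded MD5 of "username:realm:password"; the Show instance of
  -- MD5Digest renders the digest in hex. The credentials here are made up.
  storedHash :: String -> String -> String -> String
  storedHash username realm password =
      show (md5 (BS.pack (intercalate ":" [username, realm, password])))

  -- e.g. storedHash "alice" "hackage" "secret"

The old CRYPT-style hash cannot be converted into this form offline, which is why the one-time login step described above is needed.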

49 | 50 | -------------------------------------------------------------------------------- /Distribution/Server/Features/HaskellPlatform/State.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, 2 | TypeFamilies, TemplateHaskell #-} 3 | 4 | module Distribution.Server.Features.HaskellPlatform.State where 5 | 6 | import Data.Acid (Query, Update, makeAcidic) 7 | import Data.Map (Map) 8 | import qualified Data.Map as Map 9 | import Data.SafeCopy (base, deriveSafeCopy) 10 | import Data.Set (Set) 11 | import qualified Data.Set as Set 12 | import Data.Typeable 13 | 14 | import Distribution.Server.Framework.Instances () 15 | import Distribution.Server.Framework.MemSize 16 | 17 | import Distribution.Package 18 | import Distribution.Version 19 | 20 | import Control.Monad.Reader (ask, asks) 21 | import Control.Monad.State (put, modify) 22 | 23 | newtype PlatformPackages = PlatformPackages { 24 | blessedPackages :: Map PackageName (Set Version) 25 | } deriving (Show, Typeable, Eq, MemSize) 26 | 27 | emptyPlatformPackages :: PlatformPackages 28 | emptyPlatformPackages = PlatformPackages Map.empty 29 | 30 | getPlatformPackages :: Query PlatformPackages PlatformPackages 31 | getPlatformPackages = ask 32 | 33 | getPlatformPackage :: PackageName -> Query PlatformPackages (Set Version) 34 | getPlatformPackage pkgname = asks (Map.findWithDefault Set.empty pkgname . blessedPackages) 35 | 36 | setPlatformPackage :: PackageName -> Set Version -> Update PlatformPackages () 37 | setPlatformPackage pkgname versions = modify $ \p -> case Set.null versions of 38 | True -> p { blessedPackages = Map.delete pkgname $ blessedPackages p } 39 | False -> p { blessedPackages = Map.insert pkgname versions $ blessedPackages p } 40 | 41 | replacePlatformPackages :: PlatformPackages -> Update PlatformPackages () 42 | replacePlatformPackages = put 43 | 44 | $(deriveSafeCopy 0 'base ''PlatformPackages) 45 | 46 | initialPlatformPackages :: PlatformPackages 47 | initialPlatformPackages = emptyPlatformPackages 48 | 49 | makeAcidic ''PlatformPackages ['getPlatformPackages 50 | ,'getPlatformPackage 51 | ,'setPlatformPackage 52 | ,'replacePlatformPackages 53 | ] 54 | 55 | -------------------------------------------------------------------------------- /datafiles/templates/EditCabalFile/cabalFileEditPage.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Edit package metadata for $pkgid$ 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Edit package metadata for $pkgid$ (Tech Preview)

13 | 14 |

NOTE: This is a work in progress. It is not currently possible 15 | to publish new revisions (see Issue 52).

17 | 18 |

Package maintainers and Hackage trustees are allowed to edit certain bits 19 | of package metadata after a release, without uploading a new tarball. 20 | 21 |

22 | 23 |

24 | 25 | Reset 26 | 27 | $if(publish)$ 28 |

Cannot publish new revision

29 | $endif$ 30 | 31 | $if(first(errors))$ 32 |

Errors

33 | $errors:{error|

$error$

}$ 34 | $endif$ 35 | 36 | $if(first(changes))$ 37 |

Changes

38 |
    39 | $changes:{change|
  • Changed $change.what$ 40 | from

    $change.from$
    41 | to
    $change.to$
  • }$ 42 |
43 | $elseif(publish)$ 44 |

Errors

45 |

No changes? A new revision isn't really a revision without any changes!

46 | $elseif(!first(errors))$ 47 |

Changes

48 | No changes. 49 | $endif$ 50 |
51 | 52 |

Advice on adjusting version constraints

53 | 54 |

You can edit the version constraints for the dependencies, 55 | either to restrict or relax them. The goal in editing the constraints should 56 | always be to make them reflect reality. 57 |

    58 |
  • If the package fails to build against certain versions of a dependency 59 | then constrain the version. 60 |

  • If the package builds against and works correctly with a newer 61 | version of a dependency then it is OK to relax the constraint (see the example below).

      63 | 64 |
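For example, a hypothetical dependency declared as

  build-depends: bytestring >= 0.9 && < 0.10

could be relaxed to an upper bound of < 0.11 once the package is known to build and work with bytestring 0.10, or given a tighter bound if a newer bytestring is known to break the build.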
65 | 66 | 67 | -------------------------------------------------------------------------------- /Distribution/Server/Features/TarIndexCache/State.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell, TypeFamilies, DeriveDataTypeable, NamedFieldPuns #-} 2 | module Distribution.Server.Features.TarIndexCache.State ( 3 | TarIndexCache(..) 4 | , initialTarIndexCache 5 | , GetTarIndexCache(GetTarIndexCache) 6 | , ReplaceTarIndexCache(ReplaceTarIndexCache) 7 | , FindTarIndex(FindTarIndex) 8 | , SetTarIndex(SetTarIndex) 9 | ) where 10 | 11 | -- TODO: use strict map? (Can we rely on containers >= 0.5?) 12 | 13 | import Data.Typeable (Typeable) 14 | import Control.Monad.Reader (ask, asks) 15 | import Control.Monad.State (put, modify) 16 | import Data.Map (Map) 17 | import qualified Data.Map as Map 18 | import Control.Applicative ((<$>)) 19 | 20 | import Data.Acid (Query, Update, makeAcidic) 21 | import Data.SafeCopy (base, deriveSafeCopy) 22 | 23 | import Distribution.Server.Framework.BlobStorage 24 | import Distribution.Server.Framework.MemSize 25 | 26 | data TarIndexCache = TarIndexCache { 27 | tarIndexCacheMap :: Map BlobId BlobId 28 | } 29 | deriving (Eq, Show, Typeable) 30 | 31 | $(deriveSafeCopy 0 'base ''TarIndexCache) 32 | 33 | instance MemSize TarIndexCache where 34 | memSize st = 2 + memSize (tarIndexCacheMap st) 35 | 36 | initialTarIndexCache :: TarIndexCache 37 | initialTarIndexCache = TarIndexCache (Map.empty) 38 | 39 | getTarIndexCache :: Query TarIndexCache TarIndexCache 40 | getTarIndexCache = ask 41 | 42 | replaceTarIndexCache :: TarIndexCache -> Update TarIndexCache () 43 | replaceTarIndexCache = put 44 | 45 | getTarIndexCacheMap :: Query TarIndexCache (Map BlobId BlobId) 46 | getTarIndexCacheMap = asks tarIndexCacheMap 47 | 48 | modifyTarIndexCacheMap :: (Map BlobId BlobId -> Map BlobId BlobId) 49 | -> Update TarIndexCache () 50 | modifyTarIndexCacheMap f = modify $ \st@TarIndexCache{tarIndexCacheMap} -> 51 | st { tarIndexCacheMap = f tarIndexCacheMap } 52 | 53 | findTarIndex :: BlobId -> Query TarIndexCache (Maybe BlobId) 54 | findTarIndex blobId = Map.lookup blobId <$> getTarIndexCacheMap 55 | 56 | setTarIndex :: BlobId -> BlobId -> Update TarIndexCache () 57 | setTarIndex tar index = modifyTarIndexCacheMap (Map.insert tar index) 58 | 59 | makeAcidic ''TarIndexCache [ 60 | 'getTarIndexCache 61 | , 'replaceTarIndexCache 62 | , 'findTarIndex 63 | , 'setTarIndex 64 | ] 65 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Search/DocTermIds.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving #-} 2 | module Distribution.Server.Features.Search.DocTermIds ( 3 | DocTermIds, 4 | TermId, 5 | fieldLength, 6 | fieldTermCount, 7 | fieldElems, 8 | create, 9 | vecIndexIx, 10 | vecCreateIx, 11 | ) where 12 | 13 | import Distribution.Server.Features.Search.TermBag (TermBag, TermId) 14 | import qualified Distribution.Server.Features.Search.TermBag as TermBag 15 | 16 | import Distribution.Server.Framework.MemSize 17 | 18 | import Data.Vector (Vector, (!)) 19 | import qualified Data.Vector as Vec 20 | import Data.Ix (Ix) 21 | import qualified Data.Ix as Ix 22 | 23 | 24 | -- | The 'TermId's for the 'Term's that occur in a document. Documents may have 25 | -- multiple fields and the 'DocTerms' type holds them separately for each field. 
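To illustrate the DocTermIds API defined below, here is a minimal sketch with a hypothetical field type; real TermIds come from the search engine's term table, but for illustration they are simply made up via toEnum:

  import Data.Ix (Ix)

  data ExampleField = NameField | DescriptionField
    deriving (Eq, Ord, Enum, Bounded, Ix, Show)

  exampleDoc :: DocTermIds ExampleField
  exampleDoc = create termsFor
    where
      termsFor NameField        = map toEnum [1, 2]
      termsFor DescriptionField = map toEnum [2, 2, 7]

  -- fieldLength exampleDoc DescriptionField               == 3
  -- fieldTermCount exampleDoc DescriptionField (toEnum 2) == 2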
26 | -- 27 | newtype DocTermIds field = DocTermIds (Vector TermBag) 28 | deriving (Show, MemSize) 29 | 30 | getField :: (Ix field, Bounded field) => DocTermIds field -> field -> TermBag 31 | getField (DocTermIds fieldVec) = vecIndexIx fieldVec 32 | 33 | create :: (Ix field, Bounded field) => 34 | (field -> [TermId]) -> DocTermIds field 35 | create docTermIds = 36 | DocTermIds (vecCreateIx (TermBag.fromList . docTermIds)) 37 | 38 | -- | The number of terms in a field within the document. 39 | fieldLength :: (Ix field, Bounded field) => DocTermIds field -> field -> Int 40 | fieldLength docterms field = 41 | TermBag.size (getField docterms field) 42 | 43 | -- | The frequency of a particular term in a field within the document. 44 | fieldTermCount :: (Ix field, Bounded field) => DocTermIds field -> field -> TermId -> Int 45 | fieldTermCount docterms field termid = 46 | TermBag.termCount (getField docterms field) termid 47 | 48 | fieldElems :: (Ix field, Bounded field) => DocTermIds field -> field -> [TermId] 49 | fieldElems docterms field = 50 | TermBag.elems (getField docterms field) 51 | 52 | --------------------------------- 53 | -- Vector indexed by Ix Bounded 54 | -- 55 | 56 | vecIndexIx :: (Ix ix, Bounded ix) => Vector a -> ix -> a 57 | vecIndexIx vec ix = vec ! Ix.index (minBound, maxBound) ix 58 | 59 | vecCreateIx :: (Ix ix, Bounded ix) => (ix -> a) -> Vector a 60 | vecCreateIx f = Vec.fromListN (Ix.rangeSize bounds) 61 | [ y | ix <- Ix.range bounds, let !y = f ix ] 62 | where 63 | bounds = (minBound, maxBound) 64 | 65 | -------------------------------------------------------------------------------- /Distribution/Client/TagsFile.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards #-} 2 | ----------------------------------------------------------------------------- 3 | -- | 4 | -- Module : Distribution.Client.TagsFile 5 | -- Copyright : (c) Duncan Coutts 2012 6 | -- License : BSD-like 7 | -- 8 | -- Maintainer : duncan@community.haskell.org 9 | -- 10 | -- Support for reading the tags files of the old hackage server. 11 | ----------------------------------------------------------------------------- 12 | module Distribution.Client.TagsFile ( 13 | Entry, 14 | read, 15 | collectDeprecated, 16 | ) where 17 | 18 | import Distribution.Package 19 | ( PackageName, PackageId, packageName ) 20 | import Distribution.Text 21 | ( simpleParse ) 22 | import Distribution.Simple.Utils 23 | ( comparing, equating ) 24 | 25 | import Data.List 26 | ( foldl', sortBy, groupBy, stripPrefix ) 27 | import System.FilePath 28 | ( splitDirectories ) 29 | 30 | import Prelude hiding (read) 31 | 32 | data Entry = Entry PackageId Bool (Maybe PackageName) 33 | deriving (Eq, Ord, Show) 34 | 35 | -- | Returns a list of log entries, however some packages have been uploaded 36 | -- more than once, so each entry is paired with any older entries for the same 37 | -- package. 38 | -- 39 | read :: FilePath -> String -> Either String Entry 40 | read filepath content 41 | | ("tags":verstr:namestr:_) <- reverse (splitDirectories filepath) 42 | , Just pkgid <- simpleParse (namestr ++ "-" ++ verstr) 43 | = Right $! 
foldl' accum (Entry pkgid False Nothing) (lines content) 44 | 45 | | otherwise 46 | = Left $ "cannot get a package id from the file name " ++ filepath 47 | where 48 | accum e@(Entry pkgid deprecated replacement) s 49 | | Just "true" <- stripPrefix "deprecated: " s 50 | = Entry pkgid True replacement 51 | 52 | | Just newnamestr <- stripPrefix "superseded by: " s 53 | , Just newpkg <- simpleParse newnamestr 54 | = Entry pkgid deprecated (Just newpkg) 55 | 56 | | otherwise 57 | = e 58 | 59 | collectDeprecated :: [Entry] -> [(PackageName, Maybe PackageName)] 60 | collectDeprecated = 61 | map (\(Entry pkgid _ replacement) -> (packageName pkgid, replacement)) 62 | . filter (\(Entry _ deprecated _) -> deprecated) 63 | . map last 64 | . groupBy (equating (\(Entry pkgid _ _) -> packageName pkgid)) 65 | . sortBy (comparing (\(Entry pkgid _ _) -> pkgid)) 66 | 67 | -------------------------------------------------------------------------------- /Distribution/Server/Features/DownloadCount/Backup.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Features.DownloadCount.Backup ( 2 | onDiskBackup 3 | , onDiskRestore 4 | , inMemBackup 5 | , inMemRestore 6 | ) where 7 | 8 | import Distribution.Server.Framework.BackupRestore 9 | import Distribution.Server.Framework.BackupDump 10 | import Distribution.Server.Features.DownloadCount.State 11 | import Distribution.Server.Util.CountingMap 12 | import Distribution.Text (display, simpleParse) 13 | import Distribution.Version 14 | import Text.CSV (CSV) 15 | 16 | onDiskBackup :: OnDiskStats -> [BackupEntry] 17 | onDiskBackup onDisk = [csvToBackup ["ondisk.csv"] $ cmToCSV onDisk] 18 | 19 | onDiskRestore :: RestoreBackup OnDiskStats 20 | onDiskRestore = importOne "ondisk.csv" cmFromCSV 21 | 22 | inMemBackup :: InMemStats -> [BackupEntry] 23 | inMemBackup (InMemStats day inMemStats) = 24 | [csvToBackup ["inmem.csv"] $ 25 | [display versionCSV] 26 | : [display day] 27 | : cmToCSV inMemStats 28 | ] 29 | where 30 | versionCSV = Version [0,1] ["unstable"] 31 | 32 | inMemRestore :: RestoreBackup InMemStats 33 | inMemRestore = importOne "inmem.csv" importInMemStats 34 | 35 | importInMemStats :: Monad m => CSV -> m InMemStats 36 | importInMemStats (_version : [dayStr] : inMemStatsCSV) = do 37 | day <- case simpleParse dayStr of 38 | Just day -> return day 39 | Nothing -> fail "importInMemStats: Invalid day" 40 | inMemStats <- cmFromCSV inMemStatsCSV 41 | return (InMemStats day inMemStats) 42 | importInMemStats _ = 43 | fail "Invalid format for inmem.csv" 44 | 45 | {------------------------------------------------------------------------------ 46 | Auxiliary 47 | ------------------------------------------------------------------------------} 48 | 49 | -- TODO: should probably move this to the RestoreBackup module and use it 50 | -- elsewhere too 51 | importOne :: String -> (CSV -> Restore a) -> RestoreBackup a 52 | importOne name importA = aux Nothing 53 | where 54 | aux ma = RestoreBackup { 55 | restoreEntry = \entry -> case entry of 56 | BackupByteString name' bs | name' == [name] -> do 57 | csv <- importCSV name bs 58 | a <- importA csv 59 | return $ aux (Just a) 60 | _ -> 61 | return $ aux ma 62 | , restoreFinalize = case ma of 63 | Just a -> return a 64 | Nothing -> fail $ "Missing " ++ name 65 | } 66 | -------------------------------------------------------------------------------- /Distribution/Server/Packages/Index.hs: -------------------------------------------------------------------------------- 1 | 
----------------------------------------------------------------------------- 2 | -- | 3 | -- Module : Distribution.Server.Packages.Index 4 | -- Copyright : (c) Duncan Coutts 2008 5 | -- License : BSD-like 6 | -- 7 | -- Maintainer : duncan@haskell.org 8 | -- Stability : provisional 9 | -- Portability : portable 10 | -- 11 | -- Create the package index. 12 | -- 13 | ----------------------------------------------------------------------------- 14 | module Distribution.Server.Packages.Index ( 15 | write, 16 | ) where 17 | 18 | import qualified Codec.Archive.Tar.Entry as Tar 19 | import qualified Distribution.Server.Util.Index as PackageIndex 20 | 21 | import Distribution.Server.Packages.Types 22 | ( CabalFileText(..), PkgInfo(..) ) 23 | import Distribution.Server.Users.Users 24 | ( Users, userIdToName ) 25 | 26 | import Distribution.Text 27 | ( display ) 28 | import Distribution.Server.Packages.PackageIndex (PackageIndex) 29 | import Data.Time.Clock 30 | ( UTCTime ) 31 | import Data.Time.Clock.POSIX 32 | ( utcTimeToPOSIXSeconds ) 33 | import Data.Int (Int64) 34 | 35 | import Data.Map (Map) 36 | import qualified Data.Map as Map 37 | import Data.ByteString.Lazy (ByteString) 38 | import Prelude hiding (read) 39 | 40 | -- Construct, with the specified user database, extra top-level files, and 41 | -- a package index, an index tarball. This tarball has the modification times 42 | -- and uploading users built-in. 43 | write :: Users -> Map String (ByteString, UTCTime) -> PackageIndex PkgInfo -> ByteString 44 | write users = PackageIndex.write (cabalFileByteString . pkgData) setModTime . extraEntries 45 | where 46 | setModTime pkgInfo entry = let (utime, uuser) = pkgUploadData pkgInfo in entry { 47 | Tar.entryTime = utcToUnixTime utime, 48 | Tar.entryOwnership = Tar.Ownership { 49 | Tar.ownerName = userName uuser, 50 | Tar.groupName = "Hackage", 51 | Tar.ownerId = 0, 52 | Tar.groupId = 0 53 | } 54 | } 55 | utcToUnixTime :: UTCTime -> Int64 56 | utcToUnixTime = truncate . utcTimeToPOSIXSeconds 57 | userName = display . userIdToName users 58 | extraEntries emap = do 59 | (path, (entry, mtime)) <- Map.toList emap 60 | Right tarPath <- return $ Tar.toTarPath False path 61 | return $ (Tar.fileEntry tarPath entry) { Tar.entryTime = utcToUnixTime mtime } 62 | 63 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/ServerEnv.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Framework.ServerEnv where 2 | 3 | import Distribution.Server.Framework.BlobStorage (BlobStorage) 4 | import Distribution.Server.Framework.Logging (Verbosity) 5 | import Distribution.Server.Framework.Templating (TemplatesMode) 6 | 7 | import qualified Network.URI as URI 8 | 9 | -- | The internal server environment as used by 'HackageFeature's. 10 | -- 11 | -- It contains various bits of static information (and handles of 12 | -- server-global objects) that are needed by the implementations of 13 | -- some 'HackageFeature's. 14 | -- 15 | data ServerEnv = ServerEnv { 16 | 17 | -- | The location of the server's static files 18 | serverStaticDir :: FilePath, 19 | 20 | -- | The location of the server's template files 21 | serverTemplatesDir :: FilePath, 22 | 23 | -- | Default templates mode 24 | serverTemplatesMode :: TemplatesMode, 25 | 26 | -- | The location of the server's state directory. This is where the 27 | -- server's persistent state is kept, e.g. using ACID state. 
28 | serverStateDir :: FilePath, 29 | 30 | -- | The blob store is a specialised provider of persistent state for 31 | -- larger relatively-static blobs of data (e.g. uploaded tarballs). 32 | serverBlobStore :: BlobStorage, 33 | 34 | -- | The temporary directory the server has been configured to use. 35 | -- Use it for temp files such as when validating uploads. 36 | serverTmpDir :: FilePath, 37 | 38 | -- | The base URI of the server, just the hostname (and perhaps port). 39 | -- Use this if you need to construct absolute URIs pointing to the 40 | -- current server (e.g. as required in RSS feeds). 41 | serverBaseURI :: URI.URI, 42 | 43 | -- | A tunable parameter for cache policy. Setting this parameter high 44 | -- during bulk imports can very significantly improve performance. During 45 | -- normal operation it probably doesn't help much. 46 | 47 | -- By delaying cache updates we can sometimes save some effort: caches are 48 | -- based on a bit of changing state and if that state is updated more 49 | -- frequently than the time taken to update the cache, then we don't have 50 | -- to do as many cache updates as we do state updates. By artificially 51 | -- increasing the time taken to update the cache we can push this further. 52 | serverCacheDelay :: Int, 53 | 54 | serverVerbosity :: Verbosity 55 | } 56 | 57 | -------------------------------------------------------------------------------- /Distribution/Server/Util/ContentType.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Util.ContentType ( 2 | parseContentAccept 3 | ) where 4 | 5 | import Happstack.Server.Types (ContentType(..)) 6 | 7 | import qualified Text.ParserCombinators.ReadP as Parse 8 | import Control.Monad 9 | import Data.List (find, sortBy) 10 | import Data.Char (isAlphaNum, isDigit) 11 | import Data.Ord (comparing) 12 | 13 | -- data VaryFormat = Json | Xml | Html | Plain | Other 14 | 15 | -- do some special processing here to fix Webkit's effing issues (and IE's, less so) 16 | -- hackageVaryingAccept :: String -> [VaryFormat] 17 | 18 | -- this just returns a list of content-types sorted by quality preference 19 | parseContentAccept :: String -> [ContentType] 20 | parseContentAccept = process . maybe [] fst . find (null . snd) . Parse.readP_to_S parser 21 | where 22 | process :: [(a, Int)] -> [a] 23 | process = map fst . sortBy (flip (comparing snd)) . filter ((/=0) . snd) 24 | parser :: Parse.ReadP [(ContentType, Int)] 25 | parser = flip Parse.sepBy1 (Parse.char ',') $ do 26 | Parse.skipSpaces 27 | -- a more 'accurate' type than (String, String) 28 | -- might be Maybe (String, Maybe String) 29 | typ <- parseMediaType 30 | void $ Parse.char '/' 31 | subTyp <- parseMediaType 32 | quality <- Parse.option 1000 $ do 33 | Parse.skipSpaces >> Parse.string ";q=" >> Parse.skipSpaces 34 | parseQuality 35 | -- TODO: parse other parameters 36 | return (ContentType {ctType = typ, ctSubtype = subTyp, ctParameters = []}, quality) 37 | parseMediaType = (Parse.char '*' >> return []) Parse.<++ Parse.munch1 (\c -> case c of '-' -> True; '.' -> True; '+' -> True; _ -> isAlphaNum c) 38 | -- other characters technically allowed but never found in the wild: !#$%&^_`|~ 39 | parseQuality :: Parse.ReadP Int -- returns a quality in fixed point (0.75 -> 750) 40 | parseQuality = (Parse.char '1' >> Parse.optional (Parse.char '.' >> Parse.many (Parse.char '0')) >> return 1000) Parse.<++ 41 | (Parse.char '0' >> zeroOption (Parse.char '.' 
>> zeroOption munch3Digits)) 42 | zeroOption :: Parse.ReadP Int -> Parse.ReadP Int 43 | zeroOption p = p Parse.<++ return 0 44 | munch3Digits :: Parse.ReadP Int 45 | munch3Digits = fmap (\s -> read $ take 3 (s++"00") :: Int) (Parse.munch1 isDigit) 46 | 47 | --application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 48 | 49 | -------------------------------------------------------------------------------- /tests/Package.hs: -------------------------------------------------------------------------------- 1 | 2 | module Package (mkPackage) where 3 | 4 | import Codec.Archive.Tar 5 | import Codec.Archive.Tar.Entry 6 | import Codec.Compression.GZip 7 | import qualified Data.ByteString.Lazy.Char8 as BS 8 | import Data.Char 9 | import System.FilePath 10 | 11 | mkPackage :: String -> (FilePath, -- Tar filename 12 | String, -- Tar file content 13 | FilePath, -- Cabal filename in index 14 | String, -- Cabal file content 15 | FilePath, -- Haskell filename in source tree 16 | String) -- Haskell file content 17 | mkPackage name = (name ++ "-1.0.0.0.tar.gz", BS.unpack targz, 18 | name ++ "/1.0.0.0/" ++ name ++ ".cabal", cabalFile, 19 | modName <.> "hs", modFile) 20 | where targz = compress tar 21 | tar = write entries 22 | entries = [directoryEntry (mkTarPath dir), 23 | cabalEntry, 24 | modEntry] 25 | dir = name ++ "-1.0.0.0" 26 | modName = headToUpper name 27 | cabalEntry = fileEntry (mkTarPath (dir name <.> "cabal")) 28 | (BS.pack cabalFile) 29 | modEntry = fileEntry (mkTarPath (dir modName <.> "hs")) 30 | (BS.pack modFile) 31 | cabalFile = unlines [ 32 | "name: " ++ name, 33 | "version: 1.0.0.0", 34 | "synopsis: test package " ++ name, 35 | "cabal-version: >= 1.2", 36 | "build-type: Simple", 37 | "license: BSD3", 38 | "category: MyCategory", 39 | "", 40 | "Library {", 41 | " exposed-modules: " ++ modName, 42 | "}"] 43 | modFile = unlines [ 44 | "module " ++ modName ++ " where", 45 | "f" ++ name ++ " :: () -> ()", 46 | "f" ++ name ++ " () = ()"] 47 | 48 | mkTarPath :: FilePath -> TarPath 49 | mkTarPath fp = case toTarPath False fp of 50 | Left err -> error err 51 | Right tp -> tp 52 | 53 | headToUpper :: String -> String 54 | headToUpper [] = [] 55 | headToUpper (x : xs) = toUpper x : xs 56 | 57 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Distro/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE 2 | DeriveDataTypeable 3 | , GeneralizedNewtypeDeriving 4 | , TemplateHaskell 5 | #-} 6 | 7 | 8 | 9 | module Distribution.Server.Features.Distro.Types where 10 | 11 | import Distribution.Server.Framework.Instances () 12 | import Distribution.Server.Framework.MemSize 13 | import Distribution.Server.Users.State() 14 | import Distribution.Server.Users.Group (UserList) 15 | 16 | import qualified Data.Map as Map 17 | import qualified Data.Set as Set 18 | 19 | import qualified Distribution.Version as Version 20 | import Distribution.Package 21 | 22 | import Control.Applicative ((<$>)) 23 | 24 | import Distribution.Text (Text(..)) 25 | 26 | import qualified Distribution.Compat.ReadP as Parse 27 | import qualified Text.PrettyPrint as Disp 28 | import qualified Data.Char as Char 29 | 30 | import Data.SafeCopy (base, deriveSafeCopy) 31 | import Data.Typeable 32 | 33 | 34 | -- | Distribution names may contain letters, numbers and punctuation. 
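A quick illustration of the DistroName syntax accepted by the Text instance below (all names hypothetical):

  -- fmap display (simpleParse "Debian"     :: Maybe DistroName)  -- Just "Debian"
  -- fmap display (simpleParse "Arch_Linux" :: Maybe DistroName)  -- Just "Arch_Linux"
  -- simpleParse "no spaces allowed" :: Maybe DistroName          -- Nothing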
35 | newtype DistroName = DistroName String 36 | deriving (Eq, Ord, Read, Show, Typeable, MemSize) 37 | 38 | instance Text DistroName where 39 | disp (DistroName name) = Disp.text name 40 | parse = DistroName <$> Parse.munch1 (\c -> Char.isAlphaNum c || c `elem` "-_()[]{}=$,;") 41 | 42 | 43 | -- | Listing of known distirbutions and their maintainers 44 | data Distributions = Distributions { 45 | nameMap :: !(Map.Map DistroName UserList) 46 | } 47 | deriving (Eq, Typeable, Show) 48 | 49 | -- | Listing of which distirbutions have which version of particular 50 | -- packages. 51 | data DistroVersions = DistroVersions { 52 | packageDistroMap :: !(Map.Map PackageName (Map.Map DistroName DistroPackageInfo)), 53 | distroMap :: !(Map.Map DistroName (Set.Set PackageName)) 54 | } deriving (Eq, Typeable, Show) 55 | 56 | data DistroPackageInfo 57 | = DistroPackageInfo 58 | { distroVersion :: Version.Version 59 | , distroUrl :: String 60 | } 61 | deriving (Eq, Typeable, Show) 62 | 63 | $(deriveSafeCopy 0 'base ''DistroName) 64 | $(deriveSafeCopy 0 'base ''Distributions) 65 | $(deriveSafeCopy 0 'base ''DistroVersions) 66 | $(deriveSafeCopy 0 'base ''DistroPackageInfo) 67 | 68 | instance MemSize Distributions where 69 | memSize (Distributions a) = memSize1 a 70 | 71 | instance MemSize DistroVersions where 72 | memSize (DistroVersions a b) = memSize2 a b 73 | 74 | instance MemSize DistroPackageInfo where 75 | memSize (DistroPackageInfo a b) = memSize2 a b 76 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Documentation/State.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell #-} 2 | 3 | module Distribution.Server.Features.Documentation.State where 4 | 5 | import Distribution.Package 6 | import Distribution.Server.Framework.BlobStorage (BlobId) 7 | import Data.TarIndex () -- For SafeCopy instances 8 | import Distribution.Server.Framework.MemSize 9 | 10 | import Data.Acid (Query, Update, makeAcidic) 11 | import Data.SafeCopy (base, deriveSafeCopy) 12 | import Data.Typeable 13 | import Control.Monad.Reader 14 | import qualified Control.Monad.State as State 15 | 16 | import qualified Data.Map as Map 17 | 18 | ---------------------------------- Documentation 19 | data Documentation = Documentation { 20 | documentation :: !(Map.Map PackageIdentifier BlobId) 21 | } deriving (Typeable, Show, Eq) 22 | 23 | deriveSafeCopy 0 'base ''Documentation 24 | 25 | instance MemSize Documentation where 26 | memSize (Documentation a) = memSize1 a 27 | 28 | initialDocumentation :: Documentation 29 | initialDocumentation = Documentation Map.empty 30 | 31 | lookupDocumentation :: PackageIdentifier -> Query Documentation (Maybe BlobId) 32 | lookupDocumentation pkgId 33 | = do m <- asks documentation 34 | return $ Map.lookup pkgId m 35 | 36 | hasDocumentation :: PackageIdentifier -> Query Documentation Bool 37 | hasDocumentation pkgId 38 | = lookupDocumentation pkgId >>= \x -> case x of 39 | Just{} -> return True 40 | _ -> return False 41 | 42 | insertDocumentation :: PackageIdentifier -> BlobId -> Update Documentation () 43 | insertDocumentation pkgId blob 44 | = State.modify $ \doc -> doc {documentation = Map.insert pkgId blob (documentation doc)} 45 | 46 | removeDocumentation :: PackageIdentifier -> Update Documentation () 47 | removeDocumentation pkgId 48 | = State.modify $ \doc -> doc {documentation = Map.delete pkgId (documentation doc)} 49 | 50 | 
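As with the other state components in this repository, the makeAcidic call at the bottom of this module turns each of these functions into an acid-state event. A minimal sketch of how a feature might invoke them, assuming a handle docState :: AcidState Documentation opened elsewhere (the names docState, pkgid and blobid are placeholders):

  -- import Data.Acid (query, update)
  --
  -- mblob  <- query  docState (LookupDocumentation pkgid)        -- IO (Maybe BlobId)
  -- exists <- query  docState (HasDocumentation pkgid)           -- IO Bool
  -- ()     <- update docState (InsertDocumentation pkgid blobid) -- IO ()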
getDocumentation :: Query Documentation Documentation 51 | getDocumentation = ask 52 | 53 | -- |Replace all existing documentation 54 | replaceDocumentation :: Documentation -> Update Documentation () 55 | replaceDocumentation = State.put 56 | 57 | makeAcidic ''Documentation ['insertDocumentation 58 | ,'removeDocumentation 59 | ,'lookupDocumentation 60 | ,'hasDocumentation 61 | ,'getDocumentation 62 | ,'replaceDocumentation 63 | ] 64 | 65 | -------------------------------------------------------------------------------- /Distribution/Server/Users/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TemplateHaskell #-} 2 | module Distribution.Server.Users.Types ( 3 | module Distribution.Server.Users.Types, 4 | module Distribution.Server.Framework.AuthTypes 5 | ) where 6 | 7 | import Distribution.Server.Framework.AuthTypes 8 | import Distribution.Server.Framework.MemSize 9 | 10 | import Distribution.Text 11 | ( Text(..) ) 12 | import qualified Distribution.Server.Util.Parse as Parse 13 | import qualified Distribution.Compat.ReadP as Parse 14 | import qualified Text.PrettyPrint as Disp 15 | import qualified Data.Char as Char 16 | 17 | import Control.Applicative ((<$>)) 18 | import Data.Aeson (ToJSON, FromJSON) 19 | import Data.SafeCopy (base, deriveSafeCopy) 20 | import Data.Typeable (Typeable) 21 | 22 | 23 | newtype UserId = UserId Int 24 | deriving (Eq, Ord, Show, Typeable, MemSize, ToJSON, FromJSON) 25 | 26 | newtype UserName = UserName String 27 | deriving (Eq, Ord, Show, Typeable, MemSize, ToJSON, FromJSON) 28 | 29 | data UserInfo = UserInfo { 30 | userName :: !UserName, 31 | userStatus :: !UserStatus 32 | } deriving (Eq, Show, Typeable) 33 | 34 | data UserStatus = AccountEnabled UserAuth 35 | | AccountDisabled (Maybe UserAuth) 36 | | AccountDeleted 37 | deriving (Eq, Show, Typeable) 38 | 39 | newtype UserAuth = UserAuth PasswdHash 40 | deriving (Show, Eq, Typeable) 41 | 42 | isActiveAccount :: UserStatus -> Bool 43 | isActiveAccount (AccountEnabled _) = True 44 | isActiveAccount (AccountDisabled _) = True 45 | isActiveAccount AccountDeleted = False 46 | 47 | instance MemSize UserInfo where 48 | memSize (UserInfo a b) = memSize2 a b 49 | 50 | instance MemSize UserStatus where 51 | memSize (AccountEnabled a) = memSize1 a 52 | memSize (AccountDisabled a) = memSize1 a 53 | memSize (AccountDeleted) = memSize0 54 | 55 | instance MemSize UserAuth where 56 | memSize (UserAuth a) = memSize1 a 57 | 58 | 59 | instance Text UserId where 60 | disp (UserId uid) = Disp.int uid 61 | parse = UserId <$> Parse.int 62 | 63 | instance Text UserName where 64 | disp (UserName name) = Disp.text name 65 | parse = UserName <$> Parse.munch1 Char.isAlphaNum 66 | 67 | 68 | $(deriveSafeCopy 0 'base ''UserId) 69 | $(deriveSafeCopy 0 'base ''UserName) 70 | $(deriveSafeCopy 1 'base ''UserAuth) 71 | $(deriveSafeCopy 0 'base ''UserStatus) 72 | $(deriveSafeCopy 0 'base ''UserInfo) 73 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Search/TermBag.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving #-} 2 | module Distribution.Server.Features.Search.TermBag ( 3 | TermId, 4 | TermBag, 5 | size, 6 | fromList, 7 | elems, 8 | termCount, 9 | ) where 10 | 11 | import Distribution.Server.Framework.MemSize 12 | 13 | import qualified Data.Vector.Unboxed as Vec 14 | import 
qualified Data.Map as Map 15 | import Data.Word (Word32) 16 | import Data.Bits 17 | 18 | newtype TermId = TermId Word32 19 | deriving (Eq, Ord, Show, Enum, MemSize) 20 | 21 | instance Bounded TermId where 22 | minBound = TermId 0 23 | maxBound = TermId 0x00FFFFFF 24 | 25 | data TermBag = TermBag !Int !(Vec.Vector TermIdAndCount) 26 | deriving Show 27 | 28 | -- We sneakily stuff both the TermId and the bag count into one 32bit word 29 | type TermIdAndCount = Word32 30 | 31 | -- Bottom 24 bits is the TermId, top 8 bits is the bag count 32 | termIdAndCount :: TermId -> Int -> TermIdAndCount 33 | termIdAndCount (TermId termid) freq = 34 | (min (fromIntegral freq) 255 `shiftL` 24) 35 | .|. (termid .&. 0x00FFFFFF) 36 | 37 | getTermId :: TermIdAndCount -> TermId 38 | getTermId word = TermId (word .&. 0x00FFFFFF) 39 | 40 | getTermCount :: TermIdAndCount -> Int 41 | getTermCount word = fromIntegral (word `shiftR` 24) 42 | 43 | 44 | size :: TermBag -> Int 45 | size (TermBag sz _) = sz 46 | 47 | elems :: TermBag -> [TermId] 48 | elems (TermBag _ vec) = map getTermId (Vec.toList vec) 49 | 50 | termCount :: TermBag -> TermId -> Int 51 | termCount (TermBag _ vec) = 52 | binarySearch 0 (Vec.length vec - 1) 53 | where 54 | binarySearch :: Int -> Int -> TermId -> Int 55 | binarySearch !a !b !key 56 | | a > b = 0 57 | | otherwise = 58 | let mid = (a + b) `div` 2 59 | tidAndCount = vec Vec.! mid 60 | in case compare key (getTermId tidAndCount) of 61 | LT -> binarySearch a (mid-1) key 62 | EQ -> getTermCount tidAndCount 63 | GT -> binarySearch (mid+1) b key 64 | 65 | fromList :: [TermId] -> TermBag 66 | fromList termids = 67 | let bag = Map.fromListWith (+) [ (t, 1) | t <- termids ] 68 | sz = Map.foldl' (+) 0 bag 69 | vec = Vec.fromListN (Map.size bag) 70 | [ termIdAndCount termid freq 71 | | (termid, freq) <- Map.toAscList bag ] 72 | in TermBag sz vec 73 | 74 | instance MemSize TermBag where 75 | memSize (TermBag _ vec) = 2 + memSizeUVector 2 vec 76 | 77 | -------------------------------------------------------------------------------- /Distribution/Server/Util/Histogram.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | 3 | module Distribution.Server.Util.Histogram where 4 | 5 | import Data.Map (Map) 6 | import qualified Data.Map as Map 7 | import Data.List (delete, sortBy) 8 | import Data.Ord (comparing) 9 | import Control.DeepSeq 10 | 11 | import Distribution.Server.Framework.MemSize 12 | 13 | 14 | -- | Histograms are intended to keep track of an integer attribute related 15 | -- to a collection of objects. 16 | data Histogram a = Histogram { 17 | histogram :: !(Map a Int), 18 | reverseHistogram :: !(Map Count [a]) 19 | } 20 | emptyHistogram :: Histogram a 21 | emptyHistogram = Histogram Map.empty Map.empty 22 | 23 | newtype Count = Count Int deriving (Eq, Show, NFData, MemSize) 24 | instance Ord Count where 25 | compare (Count a) (Count b) = compare b a 26 | 27 | instance NFData a => NFData (Histogram a) where 28 | rnf (Histogram a b) = rnf a `seq` rnf b 29 | 30 | instance MemSize a => MemSize (Histogram a) where 31 | memSize (Histogram a b) = memSize2 a b 32 | 33 | topCounts :: Ord a => Histogram a -> [(a, Int)] 34 | topCounts = concatMap (\(Count c, es) -> map (flip (,) c) es) . Map.toList . reverseHistogram 35 | 36 | topEntries :: Ord a => Histogram a -> [a] 37 | topEntries = concat . Map.elems . 
reverseHistogram 38 | 39 | getCount :: Ord a => Histogram a -> a -> Int 40 | getCount (Histogram hist _) entry = Map.findWithDefault 0 entry hist 41 | 42 | updateHistogram :: Ord a => a -> Int -> Histogram a -> Histogram a 43 | updateHistogram entry new (Histogram hist rev) = 44 | let old = Map.findWithDefault 0 entry hist 45 | in Histogram 46 | (Map.insert entry new hist) 47 | (Map.alter putInEntry (Count new) . Map.alter takeOutEntry (Count old) $ rev) 48 | where 49 | takeOutEntry Nothing = Nothing 50 | takeOutEntry (Just l) = case delete entry l of 51 | [] -> Nothing 52 | l' -> Just l' 53 | putInEntry Nothing = Just [entry] 54 | putInEntry (Just l) = Just (entry:l) 55 | 56 | constructHistogram :: Ord a => [(a, Int)] -> Histogram a 57 | constructHistogram assoc = Histogram 58 | (Map.fromList assoc) 59 | (Map.fromListWith (++) . map toSingle $ assoc) 60 | where toSingle (entry, c) = (Count c, [entry]) 61 | 62 | sortByCounts :: Ord a => (b -> a) -> Histogram a -> [b] -> [(b, Int)] 63 | sortByCounts entryFunc (Histogram hist _) items = 64 | let modEntry item = (item, Map.findWithDefault 0 (entryFunc item) hist) 65 | in sortBy (comparing snd) $ map modEntry items 66 | 67 | -------------------------------------------------------------------------------- /Distribution/Server/Util/TextSearch.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Util.TextSearch ( 2 | TextSearch(..), 3 | constructTextIndex, 4 | searchText 5 | ) where 6 | 7 | import Data.ByteString (ByteString) 8 | import qualified Data.ByteString.Char8 as BS -- TODO: Deal with UTF8 9 | import Data.ByteString.Search 10 | import Data.Map (Map) 11 | import qualified Data.Map as Map 12 | import Data.Char 13 | import Data.Maybe (catMaybes) 14 | import Control.DeepSeq 15 | 16 | import Distribution.Server.Framework.MemSize 17 | 18 | 19 | -- Basic full text search. This works best when there are plenty of entries 20 | -- and all of them are short. I'd use something like Hayoo here, but there's 21 | -- no easy way to integrate it into the site. 22 | -- 23 | -- At present this uses Bayer-Moore. Something with multiple search keys 24 | -- might be more flexible. Or, even better, a Lucene-like engine. 25 | data TextSearch = TextSearch { 26 | fullText :: !ByteString, 27 | textIndex :: !(Map Int (String, String)) 28 | } deriving Show 29 | 30 | instance NFData TextSearch 31 | 32 | constructTextIndex :: [(String, String)] -> TextSearch 33 | constructTextIndex strs = case go strs 0 of 34 | (bs, texts) -> TextSearch (BS.concat bs) (Map.fromList texts) 35 | where 36 | go :: [(String, String)] -> Int -> ([ByteString], [(Int, (String, String))]) 37 | go [] _ = ([], []) 38 | go (pair@(_, text):xs) pos = 39 | let text' = BS.pack $ "\0" ++ stripText text 40 | in case go xs (BS.length text' + pos) of 41 | ~(bs, texts) -> (text':bs, (pos, pair):texts) 42 | 43 | stripText :: String -> String 44 | stripText = map toLower . filter (\c -> isSpace c || isAlphaNum c) 45 | 46 | searchText :: TextSearch -> String -> [(String, String)] 47 | searchText (TextSearch theText theIndex) str = 48 | Map.toList . Map.fromAscListWith const 49 | . catMaybes . 
map (\i -> getIndexEntry (fromIntegral i) theIndex) 50 | $ nonOverlappingIndices (BS.pack $ stripText str) theText 51 | 52 | -- TODO: offset might be useful for determining whether the match was whole-word 53 | -- or no 54 | getIndexEntry :: Int -> Map Int a -> Maybe a 55 | getIndexEntry index theIndex = case Map.splitLookup index theIndex of 56 | (_, Just entry, _) -> Just entry 57 | (beforeMap, _, afterMap) -> case (Map.null beforeMap, Map.null afterMap) of 58 | (True, True) -> Nothing 59 | (True, False) -> Just $ snd $ Map.findMin afterMap 60 | (False, _) -> Just $ snd $ Map.findMax beforeMap 61 | 62 | instance MemSize TextSearch where 63 | memSize (TextSearch a b) = memSize2 a b 64 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Upload/Backup.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards #-} 2 | 3 | module Distribution.Server.Features.Upload.Backup ( 4 | maintainerBackup, 5 | maintToExport, 6 | maintToCSV 7 | ) where 8 | 9 | import Distribution.Server.Features.Upload.State 10 | 11 | import Distribution.Server.Users.Group (UserList(..)) 12 | import Distribution.Server.Framework.BackupRestore 13 | import Distribution.Server.Framework.BackupDump 14 | 15 | import Distribution.Package 16 | import Distribution.Text 17 | import Data.Version 18 | import Text.CSV (CSV, Record) 19 | 20 | import Data.Map (Map) 21 | import qualified Data.Map as Map 22 | import qualified Data.IntSet as IntSet 23 | 24 | ------------------------------------------------------------------------------- 25 | -- Maintainer groups backup 26 | maintainerBackup :: RestoreBackup PackageMaintainers 27 | maintainerBackup = updateMaintainers Map.empty 28 | 29 | updateMaintainers :: Map PackageName UserList -> RestoreBackup PackageMaintainers 30 | updateMaintainers mains = RestoreBackup { 31 | restoreEntry = \entry -> do 32 | case entry of 33 | BackupByteString ["maintainers.csv"] bs -> do 34 | csv <- importCSV "maintainers.csv" bs 35 | mains' <- importMaintainers csv mains 36 | return (updateMaintainers mains') 37 | _ -> 38 | return (updateMaintainers mains) 39 | , restoreFinalize = 40 | return $ PackageMaintainers (mains) 41 | } 42 | 43 | importMaintainers :: CSV -> Map PackageName UserList -> Restore (Map PackageName UserList) 44 | importMaintainers = concatM . map fromRecord . 
drop 2 45 | where 46 | fromRecord :: Record -> Map PackageName UserList -> Restore (Map PackageName UserList) 47 | fromRecord (packageStr:idStr) mains = do 48 | pkgname <- parseText "package name" packageStr 49 | ids <- mapM (parseRead "user id") idStr 50 | return (Map.insert pkgname (UserList $ IntSet.fromList ids) mains) 51 | fromRecord x _ = fail $ "Invalid package maintainer record: " ++ show x 52 | 53 | maintToExport :: Map PackageName UserList -> BackupEntry 54 | maintToExport pkgmap = csvToBackup ["maintainers.csv"] (maintToCSV assocUsers) 55 | where assocUsers = map (\(name, UserList ul) -> (name, IntSet.toList ul)) 56 | $ Map.toList pkgmap 57 | 58 | maintToCSV :: [(PackageName, [Int])] -> CSV 59 | maintToCSV users = [showVersion pkgCSVVer]:pkgCSVKey: 60 | map (\(name, ids) -> display name:map show ids) users 61 | where 62 | pkgCSVKey = ["package", "maintainers"] 63 | pkgCSVVer = Version [0,1] ["unstable"] 64 | 65 | -------------------------------------------------------------------------------- /tests/Run.hs: -------------------------------------------------------------------------------- 1 | 2 | module Run (run) where 3 | 4 | import Control.Concurrent 5 | import Control.Exception as Exception 6 | import Control.Monad 7 | import System.Exit 8 | import System.IO 9 | import System.IO.Error 10 | import System.Posix 11 | 12 | run :: FilePath -> [String] -> IO (Maybe ExitCode) 13 | run cmd args = do 14 | ei <- tryIOError $ do pid <- systemSession cmd args 15 | return pid 16 | case ei of 17 | Left _ -> return Nothing 18 | Right pid -> 19 | do mv <- newEmptyMVar 20 | void $ forkIO $ do r <- getProcessStatus True False pid 21 | putMVar mv r 22 | r <- takeMVar mv 23 | case r of 24 | Nothing -> do 25 | putStrLn "getProcessStatus Nothing, so killing" 26 | killProcessGroup pid 27 | return Nothing 28 | Just (Exited ec) -> return (Just ec) 29 | Just (Terminated _) -> return Nothing 30 | Just _ -> return Nothing 31 | `Exception.catch` \e -> 32 | do putStrLn ("Got " ++ show (e :: SomeException) ++ ", so killing") 33 | killProcessGroup pid 34 | return Nothing 35 | 36 | systemSession :: FilePath -> [String] -> IO ProcessID 37 | systemSession cmd args = 38 | forkProcess $ do 39 | void createSession 40 | executeFile cmd False args Nothing 41 | -- need to use exec() directly here, rather than something like 42 | -- System.Process.system, because we are in a forked child and some 43 | -- pthread libraries get all upset if you start doing certain 44 | -- things in a forked child of a pthread process, such as forking 45 | -- more threads. 46 | 47 | killProcessGroup :: ProcessID -> IO () 48 | killProcessGroup pid = do 49 | ignoreIOExceptions (signalProcessGroup sigTERM pid) 50 | checkReallyDead 10 51 | where 52 | checkReallyDead :: Integer -> IO () 53 | checkReallyDead 0 = hPutStrLn stderr "checkReallyDead: Giving up" 54 | checkReallyDead n = 55 | do threadDelay (3*100000) -- 3/10 sec 56 | m <- tryJust (guard . 
isDoesNotExistError) $ 57 | getProcessStatus False False pid 58 | case m of 59 | Right Nothing -> return () 60 | Left _ -> return () 61 | _ -> do 62 | ignoreIOExceptions (signalProcessGroup sigKILL pid) 63 | checkReallyDead (n - 1) 64 | 65 | ignoreIOExceptions :: IO () -> IO () 66 | ignoreIOExceptions io = io `catchIOError` ((\_ -> return ())) 67 | 68 | -------------------------------------------------------------------------------- /datafiles/templates/AdminFrontend/admin.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: admin front-end 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

Admin front-end

13 | 14 |

User accounts

15 | 46 | 47 |

Account signup and reset requests

48 | 49 |
    50 |
  • 51 |
    52 | 53 | 54 |
    55 | $if(first(signups))$ 56 |

    Found signup requests:

    57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | $signups:{signup| 65 | 66 | 67 | 68 | 69 | 70 | 71 | }$ 72 |
    User nameReal nameEmailTimestamp
    $signup.username$$signup.realname$$signup.email$$signup.timestamp$
    73 | $elseif(findSignup)$ 74 |

    No matching signup requests found

    75 | $endif$ 76 |
  • 77 |
  • Browse all signup requests
  • 78 |
  • Browse all reset requests
  • 79 |
80 | 81 |

Server status

82 | 85 | 86 | 87 |

TODO

88 | 89 |

TODO list for this admin interface 90 |

91 |
    92 |
  • Account username change
  • 93 |
  • Account undelete
  • 94 |
95 | 96 |
97 | 98 | 99 | -------------------------------------------------------------------------------- /Distribution/Server/Features/BuildReports/State.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell, 2 | FlexibleInstances, FlexibleContexts, MultiParamTypeClasses, 3 | TypeOperators, TypeSynonymInstances #-} 4 | {-# OPTIONS_GHC -fno-warn-orphans #-} 5 | module Distribution.Server.Features.BuildReports.State where 6 | 7 | import Distribution.Server.Features.BuildReports.BuildReports (BuildReportId, BuildLog, BuildReport, BuildReports) 8 | import qualified Distribution.Server.Features.BuildReports.BuildReports as BuildReports 9 | 10 | import Distribution.Package 11 | 12 | import Control.Monad.Reader 13 | import qualified Control.Monad.State as State 14 | import Data.Acid (Query, Update, makeAcidic) 15 | 16 | initialBuildReports :: BuildReports 17 | initialBuildReports = BuildReports.emptyReports 18 | 19 | -- and defined methods 20 | addReport :: PackageId -> (BuildReport, Maybe BuildLog) -> Update BuildReports BuildReportId 21 | addReport pkgid report = do 22 | buildReports <- State.get 23 | let (reports, reportId) = BuildReports.addReport pkgid report buildReports 24 | State.put reports 25 | return reportId 26 | 27 | setBuildLog :: PackageId -> BuildReportId -> Maybe BuildLog -> Update BuildReports Bool 28 | setBuildLog pkgid reportId buildLog = do 29 | buildReports <- State.get 30 | case BuildReports.setBuildLog pkgid reportId buildLog buildReports of 31 | Nothing -> return False 32 | Just reports -> State.put reports >> return True 33 | 34 | deleteReport :: PackageId -> BuildReportId -> Update BuildReports Bool --Maybe BuildReports 35 | deleteReport pkgid reportId = do 36 | buildReports <- State.get 37 | case BuildReports.deleteReport pkgid reportId buildReports of 38 | Nothing -> return False 39 | Just reports -> State.put reports >> return True 40 | 41 | lookupReport :: PackageId -> BuildReportId -> Query BuildReports (Maybe (BuildReport, Maybe BuildLog)) 42 | lookupReport pkgid reportId = asks (BuildReports.lookupReport pkgid reportId) 43 | 44 | lookupPackageReports :: PackageId -> Query BuildReports [(BuildReportId, (BuildReport, Maybe BuildLog))] 45 | lookupPackageReports pkgid = asks (BuildReports.lookupPackageReports pkgid) 46 | 47 | getBuildReports :: Query BuildReports BuildReports 48 | getBuildReports = ask 49 | 50 | replaceBuildReports :: BuildReports -> Update BuildReports () 51 | replaceBuildReports = State.put 52 | 53 | makeAcidic ''BuildReports ['addReport 54 | ,'setBuildLog 55 | ,'deleteReport 56 | ,'lookupReport 57 | ,'lookupPackageReports 58 | ,'getBuildReports 59 | ,'replaceBuildReports 60 | ] 61 | 62 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/AuthCrypt.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Framework.AuthCrypt ( 2 | PasswdPlain(..), 3 | PasswdHash(..), 4 | newPasswdHash, 5 | checkBasicAuthInfo, 6 | BasicAuthInfo(..), 7 | checkDigestAuthInfo, 8 | DigestAuthInfo(..), 9 | QopInfo(..), 10 | ) where 11 | 12 | import Distribution.Server.Framework.AuthTypes 13 | import Distribution.Server.Users.Types (UserName(..)) 14 | 15 | import Data.Digest.Pure.MD5 (md5) 16 | import qualified Data.ByteString.Lazy.Char8 as BS.Lazy -- Only used for ASCII data 17 | import Data.List (intercalate) 18 | 19 | -- Hashed passwords are stored in the format: 
20 | -- 21 | -- @md5 (username ++ ":" ++ realm ++ ":" ++ password)@. 22 | -- 23 | -- This format enables us to use either the basic or digest 24 | -- HTTP authentication methods. 25 | 26 | -- | Create a new 'PasswdHash' suitable for safe permanent storage. 27 | -- 28 | newPasswdHash :: RealmName -> UserName -> PasswdPlain -> PasswdHash 29 | newPasswdHash (RealmName realmName) (UserName userName) (PasswdPlain passwd) = 30 | PasswdHash $ md5HexDigest [userName, realmName, passwd] 31 | 32 | ------------------ 33 | -- HTTP Basic auth 34 | -- 35 | 36 | data BasicAuthInfo = BasicAuthInfo { 37 | basicRealm :: RealmName, 38 | basicUsername :: UserName, 39 | basicPasswd :: PasswdPlain 40 | } 41 | 42 | checkBasicAuthInfo :: PasswdHash -> BasicAuthInfo -> Bool 43 | checkBasicAuthInfo hash (BasicAuthInfo realmName userName pass) = 44 | newPasswdHash realmName userName pass == hash 45 | 46 | ------------------ 47 | -- HTTP Digest auth 48 | -- 49 | 50 | data DigestAuthInfo = DigestAuthInfo { 51 | digestUsername :: UserName, 52 | digestNonce :: String, 53 | digestResponse :: String, 54 | digestURI :: String, 55 | digestRqMethod :: String, 56 | digestQoP :: QopInfo 57 | } 58 | deriving Show 59 | 60 | data QopInfo = QopNone 61 | | QopAuth { 62 | digestNonceCount :: String, 63 | digestClientNonce :: String 64 | } 65 | -- | QopAuthInt 66 | deriving Show 67 | 68 | -- See RFC 2617 http://www.ietf.org/rfc/rfc2617 69 | -- 70 | checkDigestAuthInfo :: PasswdHash -> DigestAuthInfo -> Bool 71 | checkDigestAuthInfo (PasswdHash passwdHash) 72 | (DigestAuthInfo _username nonce response uri method qopinfo) = 73 | hash3 == response 74 | where 75 | hash1 = passwdHash 76 | hash2 = md5HexDigest [method, uri] 77 | hash3 = case qopinfo of 78 | QopNone -> md5HexDigest [hash1, nonce, hash2] 79 | QopAuth nc cnonce -> md5HexDigest [hash1, nonce, nc, cnonce, "auth", hash2] 80 | 81 | ------------------ 82 | -- Utils 83 | -- 84 | 85 | md5HexDigest :: [String] -> String 86 | md5HexDigest = show . md5 . BS.Lazy.pack . intercalate ":" 87 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Group.hs: -------------------------------------------------------------------------------- 1 | -- Body of the HTML page for a package 2 | module Distribution.Server.Pages.Group ( 3 | groupPage, 4 | renderGroupName 5 | ) where 6 | 7 | import Text.XHtml.Strict 8 | import System.FilePath.Posix (()) 9 | import Distribution.Server.Pages.Template (hackagePage) 10 | import qualified Distribution.Server.Users.Types as Users 11 | import Distribution.Server.Users.Group (GroupDescription(..)) 12 | import qualified Distribution.Server.Users.Group as Group 13 | import Distribution.Text 14 | import Data.Maybe 15 | 16 | renderGroupName :: GroupDescription -> Maybe String -> Html 17 | renderGroupName desc murl = 18 | maybeUrl (groupTitle desc) murl 19 | +++ 20 | maybe noHtml (\(for, mfor) -> " for " +++ maybeUrl for mfor) (groupEntity desc) 21 | where maybeUrl text = maybe (toHtml text) (\url -> anchor ! 
[href url] << text) 22 | 23 | -- Primitive access control: the URI to post a new user request to, or the the URI/user/ to DELETE 24 | -- if neither adding or removing is enabled, a link to a URI/edit page is provided 25 | groupPage :: [Users.UserName] -> String -> (Bool, Bool) -> GroupDescription -> Html 26 | groupPage users baseUri controls desc = hackagePage (Group.groupName desc) (groupBody users baseUri controls desc) 27 | 28 | -- | Body of the page 29 | -- If either addUri or removeUri are true, it can be assumed that we are one the 30 | -- \/edit subpage of the group. 31 | groupBody :: [Users.UserName] -> String -> (Bool, Bool) -> GroupDescription -> [Html] 32 | groupBody users baseUri (addUri, removeUri) desc = 33 | [ h2 << renderGroupName desc (if addUri || removeUri then Just baseUri else Nothing) 34 | , paragraph << 35 | [ toHtml $ groupPrologue desc 36 | , if addUri || removeUri then noHtml else thespan ! [thestyle "color: gray"] << 37 | [ toHtml " [" 38 | , anchor ! [href $ baseUri "edit"] << "edit" 39 | , toHtml "]" 40 | ] 41 | ] 42 | , listGroup users (if removeUri then Just baseUri else Nothing) 43 | , if addUri then concatHtml $ addUser baseUri else noHtml 44 | ] 45 | 46 | addUser :: String -> [Html] 47 | addUser uri = 48 | [ h3 << "Add user" 49 | , gui uri ! [theclass "box"] << 50 | [ p << [stringToHtml "User: ", textfield "user"] 51 | , submit "submit" "Add member" 52 | ] 53 | ] 54 | 55 | removeUser :: Users.UserName -> String -> [Html] 56 | removeUser uname uri = 57 | [ toHtml " ", 58 | gui (uri "user" display uname) << 59 | [ hidden "_method" "DELETE" 60 | , submit "submit" "Remove" 61 | ] 62 | ] 63 | 64 | listGroup :: [Users.UserName] -> Maybe String -> Html 65 | listGroup [] _ = p << "No member exist presently" 66 | listGroup users muri = unordList (map displayName users) 67 | where displayName uname = (anchor ! 
[href $ "/user/" ++ display uname] << display uname) +++ 68 | fromMaybe [] (fmap (removeUser uname) muri) 69 | 70 | -------------------------------------------------------------------------------- /Distribution/Server/Features/PackageCandidates/State.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell #-} 2 | 3 | module Distribution.Server.Features.PackageCandidates.State where 4 | 5 | import Distribution.Server.Features.PackageCandidates.Types 6 | import Distribution.Server.Framework.MemSize 7 | 8 | import qualified Distribution.Server.Packages.PackageIndex as PackageIndex 9 | import Distribution.Package 10 | 11 | import Data.Acid (Query, Update, makeAcidic) 12 | import Data.SafeCopy (deriveSafeCopy, base) 13 | import Data.Typeable 14 | import Control.Monad.Reader 15 | import qualified Control.Monad.State as State 16 | import Data.Monoid 17 | 18 | 19 | ---------------------------------- Index of candidate tarballs and metadata 20 | -- boilerplate code based on PackagesState 21 | data CandidatePackages = CandidatePackages { 22 | candidateList :: !(PackageIndex.PackageIndex CandPkgInfo) 23 | } deriving (Typeable, Show, Eq) 24 | 25 | deriveSafeCopy 0 'base ''CandidatePackages 26 | 27 | instance MemSize CandidatePackages where 28 | memSize (CandidatePackages a) = memSize1 a 29 | 30 | initialCandidatePackages :: CandidatePackages 31 | initialCandidatePackages = CandidatePackages { 32 | candidateList = mempty 33 | } 34 | 35 | replaceCandidate :: CandPkgInfo -> Update CandidatePackages () 36 | replaceCandidate pkg = State.modify $ \candidates -> candidates { candidateList = replaceVersions (candidateList candidates) } 37 | where replaceVersions = PackageIndex.insert pkg . 
PackageIndex.deletePackageName (packageName pkg) 38 | 39 | addCandidate :: CandPkgInfo -> Update CandidatePackages () 40 | addCandidate pkg = State.modify $ \candidates -> candidates { candidateList = addVersion (candidateList candidates) } 41 | where addVersion = PackageIndex.insert pkg 42 | 43 | deleteCandidate :: PackageId -> Update CandidatePackages () 44 | deleteCandidate pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersion (candidateList candidates) } 45 | where deleteVersion = PackageIndex.deletePackageId pkg 46 | 47 | deleteCandidates :: PackageName -> Update CandidatePackages () 48 | deleteCandidates pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersions (candidateList candidates) } 49 | where deleteVersions = PackageIndex.deletePackageName pkg 50 | 51 | -- |Replace all existing packages and reports 52 | replaceCandidatePackages :: CandidatePackages -> Update CandidatePackages () 53 | replaceCandidatePackages = State.put 54 | 55 | getCandidatePackages :: Query CandidatePackages CandidatePackages 56 | getCandidatePackages = ask 57 | 58 | 59 | makeAcidic ''CandidatePackages ['getCandidatePackages 60 | ,'replaceCandidatePackages 61 | ,'replaceCandidate 62 | ,'addCandidate 63 | ,'deleteCandidate 64 | ,'deleteCandidates 65 | ] 66 | 67 | -------------------------------------------------------------------------------- /old-hackage-import.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | HACKAGE_SERVER=hackage-server 4 | HACKAGE_IMPORT=hackage-import 5 | 6 | # number of cores to run the server on 7 | CORES=1 8 | 9 | # number of concurrent upload jobs 10 | JOBS=10 11 | 12 | 13 | STATE_DIR=./state 14 | STATIC_DIR=./datafiles 15 | 16 | IMPORTDATA_DIR=./import-data 17 | ADMIN_PASSWD=admin 18 | 19 | SERVER_HOST=localhost 20 | SERVER_PORT=8080 21 | SERVER_URL=http://admin:${ADMIN_PASSWD}@${SERVER_HOST}:${SERVER_PORT} 22 | 23 | LOG_DIR=./logs 24 | SERVER_LOG=${LOG_DIR}/server.log 25 | IMPORT_LOG=${LOG_DIR}/import.log 26 | 27 | echo "initialising server..." 28 | ${HACKAGE_SERVER} init --static=${STATIC_DIR} --state=${STATE_DIR} --admin=admin:${ADMIN_PASSWD} > ${SERVER_LOG} 29 | echo "running server..." 30 | ${HACKAGE_SERVER} run -v3 --port=${SERVER_PORT} --static=${STATIC_DIR} --state=${STATE_DIR} --delay-cache-updates=60 +RTS -N${CORES} >> ${SERVER_LOG} 2>&1 & 31 | 32 | echo "Waiting a sec for the server to start..." 33 | sleep 2 34 | 35 | echo "Making 'admin' user a member of the mirrorers group" 36 | curl -u admin:${ADMIN_PASSWD} -X PUT ${SERVER_URL}/packages/mirrorers/user/admin > ${IMPORT_LOG} 2>&1 37 | echo "Making 'admin' user a member of the trustees group" 38 | curl -u admin:${ADMIN_PASSWD} -X PUT ${SERVER_URL}/packages/trustees/user/admin >> ${IMPORT_LOG} 2>&1 39 | 40 | echo "importing users..." 41 | time ${HACKAGE_IMPORT} users ${SERVER_URL} --htpasswd=${IMPORTDATA_DIR}/passwd/hackage.htpasswd --all-uploaders --addresses=${IMPORTDATA_DIR}/passwd/hackage.addresses --jobs=${JOBS} >> ${IMPORT_LOG} 42 | 43 | echo "importing package metadata..." 44 | time ${HACKAGE_IMPORT} metadata ${SERVER_URL} --index=${IMPORTDATA_DIR}/archive/00-index.tar.gz --jobs=${JOBS} >> ${IMPORT_LOG} 45 | 46 | echo "importing package owner data..." 47 | time ${HACKAGE_IMPORT} metadata ${SERVER_URL} --upload-log=${IMPORTDATA_DIR}/archive/log --jobs=${JOBS} >> ${IMPORT_LOG} 48 | 49 | echo "importing package tarballs..." 
50 | time find ${IMPORTDATA_DIR}/archive -name '*.tar.gz' -print0 | xargs -0 \ 51 | ${HACKAGE_IMPORT} tarball ${SERVER_URL} --jobs=${JOBS} >> ${IMPORT_LOG} 52 | 53 | echo "importing package documentation..." 54 | time find ${IMPORTDATA_DIR}/docs -name '*-docs.tar.gz' -print0 | xargs -0 \ 55 | ${HACKAGE_IMPORT} docs ${SERVER_URL} --jobs=${JOBS} >> ${IMPORT_LOG} 56 | 57 | echo "importing package deprecation info..." 58 | time find ${IMPORTDATA_DIR}/archive -name 'tags' -print0 | xargs -0 \ 59 | ${HACKAGE_IMPORT} deprecation ${SERVER_URL} >> ${IMPORT_LOG} 60 | 61 | echo "importing distro info..." 62 | time ${HACKAGE_IMPORT} distro ${SERVER_URL} ${IMPORTDATA_DIR}/archive/00-distromap/* >> ${IMPORT_LOG} 63 | 64 | echo "importing download counts..." 65 | time ${HACKAGE_IMPORT} downloads ${SERVER_URL} ${IMPORTDATA_DIR}/download-logs/*.gz >> ${IMPORT_LOG} 66 | 67 | echo "Checkpointing server state..." 68 | kill -USR1 `pidof hackage-server` 69 | echo "Waiting..." 70 | sleep 30 71 | echo "Shutting down server..." 72 | kill `pidof hackage-server` 73 | -------------------------------------------------------------------------------- /Distribution/Client/Cron.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable #-} 2 | module Distribution.Client.Cron 3 | ( cron 4 | , Signal(..) 5 | , ReceivedSignal(..) 6 | , rethrowSignalsAsExceptions 7 | ) where 8 | 9 | import Control.Monad (forM_) 10 | import Control.Exception (Exception) 11 | import Control.Concurrent (myThreadId, threadDelay, throwTo) 12 | import System.Random (randomRIO) 13 | import System.Locale (defaultTimeLocale) 14 | import Data.Time.Format (formatTime) 15 | import Data.Time.Clock (UTCTime, getCurrentTime, addUTCTime) 16 | import Data.Time.LocalTime (getCurrentTimeZone, utcToZonedTime) 17 | import Data.Typeable (Typeable) 18 | 19 | import qualified System.Posix.Signals as Posix 20 | 21 | import Distribution.Verbosity (Verbosity) 22 | import Distribution.Simple.Utils hiding (warn) 23 | 24 | data ReceivedSignal = ReceivedSignal Signal UTCTime 25 | deriving (Show, Typeable) 26 | 27 | data Signal = SIGABRT 28 | | SIGINT 29 | | SIGQUIT 30 | | SIGTERM 31 | deriving (Show, Typeable) 32 | 33 | instance Exception ReceivedSignal 34 | 35 | 36 | -- | "Re"throw signals as exceptions to the invoking thread 37 | rethrowSignalsAsExceptions :: [Signal] -> IO () 38 | rethrowSignalsAsExceptions signals = do 39 | tid <- myThreadId 40 | forM_ signals $ \s -> 41 | let handler = do 42 | time <- getCurrentTime 43 | throwTo tid (ReceivedSignal s time) 44 | in Posix.installHandler (toPosixSignal s) (Posix.Catch handler) Nothing 45 | 46 | toPosixSignal :: Signal -> Posix.Signal 47 | toPosixSignal SIGABRT = Posix.sigABRT 48 | toPosixSignal SIGINT = Posix.sigINT 49 | toPosixSignal SIGQUIT = Posix.sigQUIT 50 | toPosixSignal SIGTERM = Posix.sigTERM 51 | 52 | -- | @cron verbosity interval act@ runs @act@ over and over with 53 | -- the specified interval. 
54 | cron :: Verbosity -> Int -> (a -> IO a) -> (a -> IO ()) 55 | cron verbosity interval action x = do 56 | x' <- action x 57 | 58 | interval' <- pertabate interval 59 | logNextSyncMessage interval' 60 | wait interval' 61 | cron verbosity interval action x' 62 | 63 | where 64 | -- to stop all mirror clients hitting the server at exactly the same time 65 | -- we randomly adjust the wait time by +/- 10% 66 | pertabate i = let deviation = i `div` 10 67 | in randomRIO (i + deviation, i - deviation) 68 | 69 | -- Annoyingly, threadDelay takes an Int number of microseconds, so we cannot 70 | -- wait much longer than an hour. So have to wait repeatedly. Sigh. 71 | wait minutes | minutes > 60 = do threadDelay (60 * 60 * 1000000) 72 | wait (minutes - 60) 73 | | otherwise = threadDelay (minutes * 60 * 1000000) 74 | 75 | logNextSyncMessage minutes = do 76 | now <- getCurrentTime 77 | tz <- getCurrentTimeZone 78 | let nextSync = addUTCTime (fromIntegral (60 * minutes)) now 79 | notice verbosity $ 80 | "Next try will be in " ++ show minutes ++ " minutes, at " 81 | ++ formatTime defaultTimeLocale "%R %Z" (utcToZonedTime tz nextSync) 82 | -------------------------------------------------------------------------------- /tests/MailUtils.hs: -------------------------------------------------------------------------------- 1 | {- 2 | In order to test Hackage, we need to be able to send check for confirmation 3 | emails. In this module we provide a simple interface to do that. 4 | 5 | Currently we use mailinator, but the API is designed to be agnostic to the 6 | specific mail service used. 7 | -} 8 | 9 | module MailUtils ( 10 | Email(..) 11 | , testEmailAddress 12 | , checkEmail 13 | , getEmail 14 | , emailWithSubject 15 | , waitForEmailWithSubject 16 | ) where 17 | 18 | import Control.Concurrent (threadDelay) 19 | import Data.Maybe 20 | import Network.URI 21 | import Network.HTTP hiding (user) 22 | 23 | import qualified Text.XML.Light as XML 24 | 25 | import HttpUtils 26 | import Util 27 | 28 | testEmailAddress :: String -> String 29 | testEmailAddress user = user ++ "@mailinator.com" 30 | 31 | data Email = Email { 32 | emailTitle :: String 33 | , emailLink :: URI 34 | , emailSender :: String 35 | , emailDate :: String 36 | } 37 | deriving Show 38 | 39 | checkEmail :: String -> IO [Email] 40 | checkEmail user = do 41 | raw <- execRequest NoAuth (getRequest rssUrl) 42 | let rss = XML.onlyElems (XML.parseXML raw) 43 | items = concatMap (XML.filterElementsName $ simpleName "item") rss 44 | return (map parseEmail items) 45 | where 46 | rssUrl = "http://www.mailinator.com/feed?to=" ++ user 47 | 48 | parseEmail :: XML.Element -> Email 49 | parseEmail e = 50 | let [title] = XML.filterElementsName (simpleName "title") e 51 | [link] = XML.filterElementsName (simpleName "link") e 52 | [sender] = XML.filterElementsName (simpleName "creator") e 53 | [date] = XML.filterElementsName (simpleName "date") e 54 | in Email { emailTitle = XML.strContent title 55 | , emailLink = fromJust . parseURI . XML.strContent $ link 56 | , emailSender = XML.strContent sender 57 | , emailDate = XML.strContent date 58 | } 59 | 60 | simpleName :: String -> XML.QName -> Bool 61 | simpleName n = (== n) . XML.qName 62 | 63 | emailWithSubject :: String -> String -> IO (Maybe Email) 64 | emailWithSubject user subject = do 65 | emails <- checkEmail user 66 | return . listToMaybe . filter ((== subject) . 
emailTitle) $ emails 67 | 68 | waitForEmailWithSubject :: String -> String -> IO Email 69 | waitForEmailWithSubject user subject = f 10 70 | where 71 | f :: Int -> IO Email 72 | f n = do 73 | info $ "Waiting for confirmation email at " ++ testEmailAddress user 74 | mEmail <- emailWithSubject user subject 75 | case mEmail of 76 | Just email -> return email 77 | Nothing | n == 0 -> die "Didn't get confirmation email" 78 | | otherwise -> do 79 | info "No confirmation email yet; will try again in 30 sec" 80 | threadDelay (30 * 1000000) 81 | f (n - 1) 82 | 83 | getEmail :: Email -> IO String 84 | getEmail email = execRequest NoAuth (getRequest url) 85 | where 86 | msgid = fromJust . lookup "msgid" . parseQuery . uriQuery . emailLink $ email 87 | url = "http://mailinator.com/rendermail.jsp?msgid=" ++ msgid ++ "&text=true" 88 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Template.hs: -------------------------------------------------------------------------------- 1 | -- Common wrapper for HTML pages 2 | module Distribution.Server.Pages.Template 3 | ( hackagePage 4 | , hackagePageWith 5 | , hackagePageWithHead 6 | ) where 7 | 8 | import Text.XHtml.Strict 9 | 10 | --TODO: replace all this with external templates 11 | 12 | -- | Create top-level HTML document by wrapping the Html with boilerplate. 13 | hackagePage :: String -> [Html] -> Html 14 | hackagePage = hackagePageWithHead [] 15 | 16 | hackagePageWithHead :: [Html] -> String -> [Html] -> Html 17 | hackagePageWithHead headExtra docTitle docContent = 18 | hackagePageWith headExtra docTitle docSubtitle docContent bodyExtra 19 | where 20 | docSubtitle = anchor ! [href introductionURL] << "Hackage :: [Package]" 21 | bodyExtra = [] 22 | 23 | hackagePageWith :: [Html] -> String -> Html -> [Html] -> [Html] -> Html 24 | hackagePageWith headExtra docTitle docSubtitle docContent bodyExtra = 25 | toHtml [ header << (docHead ++ headExtra) 26 | , body << (docBody ++ bodyExtra) ] 27 | where 28 | docHead = [ thetitle << ("Hackage: " ++ docTitle) 29 | , thelink ! [ rel "stylesheet" 30 | , href stylesheetURL 31 | , thetype "text/css"] << noHtml 32 | -- if Search is enabled 33 | , thelink ! [ rel "search", href "/packages/opensearch.xml" 34 | , thetype "application/opensearchdescription+xml" 35 | , title "Hackage" ] << noHtml 36 | ] 37 | docBody = [ thediv ! [identifier "page-header"] << docHeader 38 | , thediv ! [identifier "content"] << docContent ] 39 | docHeader = [ navigationBar 40 | , paragraph ! [theclass "caption"] << docSubtitle ] 41 | 42 | navigationBar :: Html 43 | navigationBar = 44 | ulist ! [theclass "links", identifier "page-menu"] 45 | << map (li <<) 46 | [ anchor ! [href introductionURL] << "Home" 47 | , form ! [action "/packages/search", theclass "search", method "get"] 48 | << [ button ! [thetype "submit"] << "Search", spaceHtml 49 | , input ! [thetype "text", name "terms" ] ] 50 | , anchor ! [href pkgListURL] << "Browse" 51 | , anchor ! [href recentAdditionsURL] << "What's new" 52 | , anchor ! [href uploadURL] << "Upload" 53 | , anchor ! 
[href accountsURL] << "User accounts" 54 | ] 55 | 56 | stylesheetURL :: URL 57 | stylesheetURL = "/static/hackage.css" 58 | 59 | -- URL of the package list 60 | pkgListURL :: URL 61 | pkgListURL = "/packages/" 62 | 63 | -- URL of the upload form 64 | introductionURL :: URL 65 | introductionURL = "/" 66 | 67 | -- URL of the upload form 68 | uploadURL :: URL 69 | uploadURL = "/upload" 70 | 71 | -- URL about user accounts, including the form to change passwords 72 | accountsURL :: URL 73 | accountsURL = "/accounts" 74 | 75 | -- URL of the admin front end 76 | adminURL :: URL 77 | adminURL = "/admin" 78 | 79 | -- URL of the list of recent additions to the database 80 | recentAdditionsURL :: URL 81 | recentAdditionsURL = "/packages/recent" 82 | 83 | -------------------------------------------------------------------------------- /Distribution/Server/Util/Index.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE PatternGuards #-} 2 | ----------------------------------------------------------------------------- 3 | -- | 4 | -- Module : Distribution.Server.Util.Index 5 | -- Copyright : (c) Duncan Coutts 2008 6 | -- License : BSD-like 7 | -- 8 | -- Maintainer : duncan@haskell.org 9 | -- Stability : provisional 10 | -- Portability : portable 11 | -- 12 | -- Extra utils related to the package indexes. 13 | ----------------------------------------------------------------------------- 14 | module Distribution.Server.Util.Index ( 15 | read, 16 | write, 17 | ) where 18 | 19 | import qualified Codec.Archive.Tar as Tar 20 | ( read, write, Entries(..) ) 21 | import qualified Codec.Archive.Tar.Entry as Tar 22 | ( Entry(..), entryPath, fileEntry, toTarPath ) 23 | 24 | import Distribution.Package 25 | import Distribution.Version 26 | import Distribution.Server.Packages.PackageIndex (PackageIndex) 27 | import qualified Distribution.Server.Packages.PackageIndex as PackageIndex 28 | import Distribution.Text 29 | ( display, simpleParse ) 30 | 31 | import Data.ByteString.Lazy (ByteString) 32 | import System.FilePath.Posix 33 | ( (), (<.>), splitDirectories, normalise ) 34 | import Prelude hiding (read) 35 | 36 | -- | Parse an uncompressed tar repository index file from a 'ByteString'. 37 | -- 38 | -- Takes a function to turn a tar entry into a package 39 | -- 40 | -- This fails only if the tar is corrupted. Any entries not recognized as 41 | -- belonging to a package are ignored. 42 | -- 43 | read :: (PackageIdentifier -> Tar.Entry -> pkg) 44 | -> ByteString 45 | -> Either String [pkg] 46 | read mkPackage indexFileContent = collect [] entries 47 | where 48 | entries = Tar.read indexFileContent 49 | collect es' Tar.Done = Right es' 50 | collect es' (Tar.Next e es) = case entry e of 51 | Just e' -> collect (e':es') es 52 | Nothing -> collect es' es 53 | collect _ (Tar.Fail err) = Left (show err) 54 | 55 | entry e 56 | | [pkgname,versionStr,_] <- splitDirectories (normalise (Tar.entryPath e)) 57 | , Just version <- simpleParse versionStr 58 | , [] <- versionTags version 59 | = let pkgid = PackageIdentifier (PackageName pkgname) version 60 | in Just (mkPackage pkgid e) 61 | entry _ = Nothing 62 | 63 | -- | Create an uncompressed tar repository index file as a 'ByteString'. 64 | -- 65 | -- Takes a couple functions to turn a package into a tar entry. Extra 66 | -- entries are also accepted. 
67 | -- 68 | write :: Package pkg 69 | => (pkg -> ByteString) 70 | -> (pkg -> Tar.Entry -> Tar.Entry) 71 | -> [Tar.Entry] 72 | -> PackageIndex pkg 73 | -> ByteString 74 | write externalPackageRep updateEntry extras = 75 | Tar.write . (extras++) . map entry . PackageIndex.allPackages 76 | where 77 | entry pkg = updateEntry pkg 78 | . Tar.fileEntry tarPath 79 | $ externalPackageRep pkg 80 | where 81 | Right tarPath = Tar.toTarPath False fileName 82 | PackageName name = packageName pkg 83 | fileName = name display (packageVersion pkg) 84 | name <.> "cabal" 85 | 86 | -------------------------------------------------------------------------------- /Data/StringTable.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TemplateHaskell #-} 2 | 3 | module Data.StringTable ( 4 | 5 | StringTable, 6 | lookup, 7 | index, 8 | construct, 9 | 10 | prop, 11 | 12 | ) where 13 | 14 | import Prelude hiding (lookup) 15 | import qualified Data.List as List 16 | import qualified Data.Array.Unboxed as A 17 | import Data.Array.Unboxed ((!)) 18 | import Data.SafeCopy (base, deriveSafeCopy) 19 | import Data.Typeable (Typeable) 20 | import qualified Data.ByteString.Char8 as BS 21 | import Data.Word (Word32) 22 | 23 | import Distribution.Server.Framework.Instances() 24 | import Distribution.Server.Framework.MemSize 25 | 26 | -- | An effecient mapping from strings to a dense set of integers. 27 | -- 28 | data StringTable id 29 | = StringTable 30 | !BS.ByteString -- all the strings concatenated 31 | !(A.UArray Int Word32) -- offset table 32 | deriving (Show, Typeable) 33 | 34 | $(deriveSafeCopy 0 'base ''StringTable) 35 | 36 | instance MemSize (StringTable id) where 37 | memSize (StringTable s o) = 3 + memSize s + memSizeUArray 4 o 38 | 39 | -- | Look up a string in the token table. If the string is present, return 40 | -- its corresponding index. 41 | -- 42 | lookup :: Enum id => StringTable id -> String -> Maybe id 43 | lookup (StringTable bs tbl) str = binarySearch 0 (topBound-1) (BS.pack str) 44 | where 45 | (0, topBound) = A.bounds tbl 46 | 47 | binarySearch a b key 48 | | a > b = Nothing 49 | | otherwise = case compare key (index' bs tbl mid) of 50 | LT -> binarySearch a (mid-1) key 51 | EQ -> Just (toEnum mid) 52 | GT -> binarySearch (mid+1) b key 53 | where mid = (a + b) `div` 2 54 | 55 | index' :: BS.ByteString -> A.UArray Int Word32 -> Int -> BS.ByteString 56 | index' bs tbl i = BS.take len . BS.drop start $ bs 57 | where 58 | start, end, len :: Int 59 | start = fromIntegral (tbl ! i) 60 | end = fromIntegral (tbl ! (i+1)) 61 | len = end - start 62 | 63 | 64 | -- | Given the index of a string in the table, return the string. 65 | -- 66 | index :: Enum id => StringTable id -> id -> String 67 | index (StringTable bs tbl) = BS.unpack . index' bs tbl . fromEnum 68 | 69 | 70 | -- | Given a list of strings, construct a 'StringTable' mapping those strings 71 | -- to a dense set of integers. 72 | -- 73 | construct :: Enum id => [String] -> StringTable id 74 | construct strs = StringTable bs tbl 75 | where 76 | bs = BS.pack (concat strs') 77 | tbl = A.array (0, length strs') (zip [0..] offsets) 78 | offsets = scanl (\off str -> off + fromIntegral (length str)) 0 strs' 79 | strs' = map head . List.group . List.sort $ strs 80 | 81 | 82 | enumStrings :: Enum id => StringTable id -> [String] 83 | enumStrings (StringTable bs tbl) = map (BS.unpack . 
index' bs tbl) [0..h-1] 84 | where (0,h) = A.bounds tbl 85 | 86 | 87 | enumIds :: Enum id => StringTable id -> [id] 88 | enumIds (StringTable _ tbl) = map toEnum [0..h-1] 89 | where (0,h) = A.bounds tbl 90 | 91 | prop :: [String] -> Bool 92 | prop strs = 93 | all lookupIndex (enumStrings tbl) 94 | && all indexLookup (enumIds tbl) 95 | 96 | where 97 | tbl :: StringTable Int 98 | tbl = construct strs 99 | 100 | lookupIndex str = index tbl ident == str 101 | where Just ident = lookup tbl str 102 | 103 | indexLookup ident = lookup tbl str == Just ident 104 | where str = index tbl ident 105 | -------------------------------------------------------------------------------- /datafiles/templates/index.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: introduction 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |
12 |

About Hackage

13 | 14 |

Hackage is a collection of released 15 | Haskell packages. 16 | Each package is in the Cabal format, 17 | a standard way of packaging Haskell source code that makes it easy to build 18 | and install. 19 | (Hackage and Cabal are components of a broader Haskell infrastructure effort 20 | called Hackage.) 21 | These pages are a basic web interface to the Hackage package database. 22 |

23 | 24 |

This isn't the official Hackage. It's an in-progress 25 | rewrite of the server using the happstack web 26 | framework. 27 |

28 | 29 |

Finding packages

30 | 31 |

The Packages link above lists the available packages 32 | and provides a full text search of the package pages (via Google), 33 | while What's new lists recent additions (also available 34 | as an RSS feed). You can also do a simple text 35 | search of package descriptions: 36 |

37 | 38 |
39 |

40 | 41 | 42 |

43 |
44 | 45 |

There are a few other package indices:

46 | 55 | 56 |

See How to install a Cabal package for instructions on installing the packages you find here.
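As a minimal illustration (assuming a working cabal-install on the PATH; the package name below is only a placeholder), installing something you find here usually comes down to:

```bash
# Refresh the local package list, then build and install a package from Hackage.
# "somepackage" is a placeholder name, not a specific recommendation.
cabal update
cabal install somepackage
```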

57 | 58 |

Releasing packages through Hackage

59 |

To upload your own releases, you'll first need to package them as Cabal 60 | source packages. 61 | See How to create a Haskell package 62 | for a tutorial. 63 | You can check and upload your package using the Upload link above, 64 | though you'll need a Hackage username and password. 65 |
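A rough sketch of that workflow with cabal-install (the same 'cabal check' pass is mentioned on the upload page; the exact tarball location depends on your cabal-install version):

```bash
# Warn about problems the server would also complain about, then
# build the source tarball (typically dist/<pkg>-<version>.tar.gz) to upload.
cabal check
cabal sdist
```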

66 | 67 |

Getting the raw data

68 | 71 | 72 |

Development

73 |

See the 74 | Hackage wiki page. 75 | There is also a document in progress summarizing the architecture of the new hackage-server. Check out the code yourself: 76 |

77 | 78 |
darcs get http://code.haskell.org/hackage-server
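Once checked out, building and running a local instance follows the steps described in README.md; roughly:

```bash
# Build the server, initialise its state, and run it.
# Defaults (per README.md): http://localhost:8080/ with an admin/admin account.
cabal install
hackage-server init
hackage-server run
```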
79 | 80 |

81 | We'd like to make it as simple as possible for anyone to set up a Hackage server. 82 |

83 |
84 | 85 | 86 | 87 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # hackage-server 2 | 3 | [![Build Status](https://travis-ci.org/haskell/hackage-server.png?branch=master)](https://travis-ci.org/haskell/hackage-server) 4 | 5 | This is the `hackage-server` code. This is what powers , and many other private hackage instances. 6 | 7 | ## Running 8 | 9 | cabal install 10 | 11 | hackage-server init 12 | hackage-server run 13 | 14 | By default the server runs on port `8080` with the following settings: 15 | 16 | URL: http://localhost:8080/ 17 | username: admin 18 | password: admin 19 | 20 | To specify something different, see `hackage-server init --help` for details. 21 | 22 | The server can be stopped by using `Control-C`. 23 | 24 | This will save the current state and shutdown cleanly. Running again 25 | will resume with the same state. 26 | 27 | ### Resetting 28 | 29 | To reset everything, kill the server and delete the server state: 30 | 31 | ```bash 32 | rm -rf state/ 33 | ``` 34 | 35 | Note that the `datafiles/` and `state/` directories differ: 36 | `datafiles` is for static html, templates and other files. 37 | The `state` directory holds the database (using `acid-state` 38 | and a separate blob store). 39 | 40 | ### Creating users & uploading packages 41 | 42 | * Admin front-end: 43 | * List of users: 44 | * Register new users: 45 | 46 | Currently there is no restriction on registering, but only an admin 47 | user can grant privileges to registered users e.g. by adding them to 48 | other groups. In particular there are groups: 49 | 50 | * admins `http://localhost:8080/users/admins/` -- administrators can 51 | do things with user accounts like disabling, deleting, changing 52 | other groups etc. 53 | * trustees `http://localhost:8080/packages/trustees/` -- trustees can 54 | do janitorial work on all packages 55 | * mirrors `http://localhost:8080/packages/mirrorers/` -- for special 56 | mirroring clients that are trusted to upload packages 57 | * per-package maintainer groups 58 | `http://localhost:8080/package/foo/maintainers` -- users allowed to 59 | upload packages 60 | * uploaders `http://localhost:8080/packages/uploaders/` -- for 61 | uploading new packages 62 | 63 | ### Mirroring 64 | 65 | There is a program included in the hackage-server package called 66 | hackage-mirror. It's intended to enable syncing all the packages from 67 | one server to another, e.g. getting all the packages from the old 68 | hackage and uploading them to a local instance of a hackage-server. 69 | 70 | To try it out: 71 | 72 | 1. Add a user to the mirrorers group via 73 | http://localhost:8080/packages/mirrorers/ 74 | 1. Create a config file that contains the local and remote 75 | server. Assuming you are cloning the packages on 76 | locally, you could create a config 77 | file as follows: 78 | 79 | ```bash 80 | echo -e "http://hackage.haskell.org\nhttp://admin:admin@localhost:8080/" > servers.cfg 81 | ``` 82 | 83 | 1. Run the client, pointing to the config file: 84 | 85 | ```bash 86 | hackage-mirror servers.cfg 87 | ``` 88 | 89 | This will do a one-time sync, and will bail out at the first sign of 90 | trouble. You can also do more robust and continuous mirroring. Use the 91 | flag `--continuous`. It will sync every 30 minutes (configurable with 92 | `--interval`). 
In this mode it carries on even when some packages 93 | cannot be mirrored for some reason and remembers them so it doesn't 94 | try them again and again. You can force it to try again by deleting 95 | the state files it mentions. 96 | -------------------------------------------------------------------------------- /Distribution/Client/UserAddressesDb.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, PatternGuards #-} 2 | -- | Parsing @hackage.addresses@ files 3 | -- 4 | module Distribution.Client.UserAddressesDb ( 5 | UserAddressesDb, 6 | UserEntry, 7 | parseFile 8 | ) where 9 | 10 | import Distribution.Server.Users.Types (UserName(..)) 11 | import Data.List 12 | import Data.ByteString (ByteString) 13 | import qualified Data.ByteString.Char8 as BS -- Mixed encoding in old DB; Char8 intended 14 | import Data.Text (Text) 15 | import qualified Data.Text as T 16 | import qualified Data.Text.Encoding as T 17 | import qualified Data.Text.Encoding.Error as T 18 | import qualified Data.Text.Read as T 19 | import Data.Functor 20 | import Data.Char (chr) 21 | import Data.Time (UTCTime, parseTime, zonedTimeToUTC) 22 | import System.Locale (defaultTimeLocale) 23 | 24 | type UserAddressesDb = [UserEntry] 25 | type UserEntry = (UserName, UserRealName, UserAddress, UTCTime, UserName) 26 | type UserRealName = Text 27 | type UserAddress = Text 28 | 29 | parseFile :: FilePath -> IO (Either String UserAddressesDb) 30 | parseFile fn = parse <$> BS.readFile fn 31 | 32 | parse :: ByteString -> Either String UserAddressesDb 33 | parse = accum 0 [] . map parseLine . BS.lines 34 | where 35 | accum _ entries [] = Right (reverse entries) 36 | accum !n entries (Right entry:rest) = accum (n+1) (entry:entries) rest 37 | accum n _ (Left line :_ ) = Left errmsg 38 | where 39 | errmsg = "parse error in addresses file on line " ++ show (n :: Int) 40 | ++ "\n" ++ BS.unpack line 41 | 42 | parseLine :: ByteString -> Either ByteString UserEntry 43 | parseLine line 44 | -- entries like: 45 | -- DuncanCoutts:Duncan Coutts:duncan.coutts@worc.ox.ac.uk:RossPaterson:Wed Jan 10 16:00:00 PDT 2007 46 | | [username,realname,email,adminname,timestr] <- splitFields line 47 | , Just timestamp <- readTime (BS.unpack timestr) 48 | = Right ( UserName (BS.unpack username) 49 | , decodeMixed realname 50 | , decodeMixed email 51 | , timestamp 52 | , UserName (BS.unpack adminname) ) 53 | | otherwise 54 | = Left line 55 | 56 | where 57 | splitFields = fixTimeBreakage . fixUrlBreakage . splitOn ':' 58 | where 59 | fixUrlBreakage [] = [] 60 | fixUrlBreakage (f:f':fs) | f == BS.pack "http" 61 | = BS.concat [f, BS.singleton ':', f'] 62 | : fixUrlBreakage fs 63 | fixUrlBreakage (f:fs) = f : fixUrlBreakage fs 64 | 65 | fixTimeBreakage [a,b,c,d,t1,t2,t3] = 66 | [a,b,c,d, BS.intercalate (BS.singleton ':') [t1,t2,t3] ] 67 | fixTimeBreakage fs = fs 68 | 69 | readTime = fmap zonedTimeToUTC 70 | . 
parseTime defaultTimeLocale "%c" 71 | 72 | -- Unfortunately the file uses mixed encoding, mostly UTF8 73 | -- but some Latin1 and some Html escape sequences 74 | decodeMixed :: ByteString -> Text 75 | decodeMixed bs 76 | | T.any ('\xFFFD' ==) astext = unescape (T.pack (BS.unpack bs)) 77 | | otherwise = unescape astext 78 | where 79 | astext = T.decodeUtf8With T.lenientDecode bs 80 | 81 | -- unescape things like "ğ" 82 | unescape :: Text -> Text 83 | unescape s 84 | | let (s0,s1) = T.breakOn (T.pack "&#") s 85 | , not (T.null s1) 86 | , Right (n,s2) <- T.decimal (T.drop 2 s1) 87 | = T.append s0 (T.cons (chr n) (T.drop 1 (unescape s2))) 88 | 89 | | otherwise = s 90 | 91 | splitOn :: Char -> ByteString -> [ByteString] 92 | splitOn c = unfoldr $ \s -> if BS.null s then Nothing 93 | else case BS.break (==c) s of 94 | (x,s') -> Just (x, BS.drop 1 s') 95 | 96 | -------------------------------------------------------------------------------- /Distribution/Client/UploadLog.hs: -------------------------------------------------------------------------------- 1 | ----------------------------------------------------------------------------- 2 | -- | 3 | -- Module : ImportClient.UploadLog 4 | -- Copyright : (c) Ross Paterson 2007 5 | -- Duncan Coutts 2008 6 | -- License : BSD-like 7 | -- 8 | -- Maintainer : duncan@community.haskell.org 9 | -- Stability : provisional 10 | -- Portability : portable 11 | -- 12 | -- Support for reading the upload log of the old hackage server. 13 | ----------------------------------------------------------------------------- 14 | module Distribution.Client.UploadLog ( 15 | Entry(..), 16 | read, 17 | collectUploadInfo, 18 | collectMaintainerInfo, 19 | ) where 20 | 21 | import Distribution.Server.Users.Types 22 | ( UserName ) 23 | 24 | import Distribution.Package 25 | ( PackageId, PackageName, packageName, PackageIdentifier(..)) 26 | import Distribution.Text 27 | ( Text(..), simpleParse ) 28 | import Distribution.ParseUtils ( parsePackageNameQ ) 29 | import qualified Distribution.Compat.ReadP as Parse 30 | import qualified Text.PrettyPrint as Disp 31 | import Text.PrettyPrint 32 | ( (<+>) ) 33 | import Distribution.Simple.Utils 34 | ( comparing, equating ) 35 | 36 | import Data.Time.Clock 37 | ( UTCTime ) 38 | import Data.Time.LocalTime 39 | ( zonedTimeToUTC ) 40 | import Data.Time.Format 41 | ( readsTime, formatTime ) 42 | import System.Locale 43 | ( defaultTimeLocale ) 44 | import Data.List 45 | ( sortBy, groupBy, nub ) 46 | 47 | import Prelude hiding (read) 48 | 49 | data Entry = Entry UTCTime UserName PackageIdentifier 50 | deriving (Eq, Ord, Show) 51 | 52 | instance Text Entry where 53 | disp (Entry time user pkgid) = 54 | Disp.text (formatTime defaultTimeLocale "%c" time) 55 | <+> disp user <+> disp pkgid 56 | parse = do 57 | time <- Parse.readS_to_P (readsTime defaultTimeLocale "%c") 58 | Parse.skipSpaces 59 | user <- parse 60 | Parse.skipSpaces 61 | pkg <- parsePackageNameQ 62 | Parse.skipSpaces 63 | ver <- parse 64 | let pkgid = PackageIdentifier pkg ver 65 | return (Entry (zonedTimeToUTC time) user pkgid) 66 | 67 | -- | Returns a list of log entries, however some packages have been uploaded 68 | -- more than once, so each entry is paired with any older entries for the same 69 | -- package. 70 | -- 71 | read :: String -> Either String [Entry] 72 | read = check [] . map parseLine . 
lines 73 | where 74 | check es' [] = Right (reverse es') 75 | check es' (Right e:es) = check (e:es') es 76 | check _ (Left err:_) = Left err 77 | parseLine line = maybe (Left err) Right (simpleParse line) 78 | where err = "Failed to parse log line:\n" ++ show line 79 | 80 | collectUploadInfo :: [Entry] -> [(PackageId, UTCTime, UserName)] 81 | collectUploadInfo = 82 | map (uploadInfo . sortBy (comparing entryTime)) 83 | . groupBy (equating entryPackageId) 84 | . sortBy (comparing entryPackageId) 85 | where 86 | entryPackageId (Entry _ _ pkgid) = pkgid 87 | entryTime (Entry t _ _) = t 88 | 89 | uploadInfo :: [Entry] -> (PackageId, UTCTime, UserName) 90 | uploadInfo entries = 91 | case last entries of 92 | Entry time uname pkgid -> (pkgid, time, uname) 93 | 94 | collectMaintainerInfo :: [Entry] -> [(PackageName, [UserName])] 95 | collectMaintainerInfo = 96 | map maintainersInfo 97 | . groupBy (equating entryPackageName) 98 | . sortBy (comparing entryPackageName) 99 | where 100 | entryPackageName (Entry _ _ pkgid) = packageName pkgid 101 | 102 | maintainersInfo :: [Entry] -> (PackageName, [UserName]) 103 | maintainersInfo entries = 104 | (packageName pkgid, maintainers) 105 | where 106 | Entry _ _ pkgid = head entries 107 | maintainers = nub [ uname | Entry _ uname _ <- entries ] 108 | 109 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Search/ExtractDescriptionTerms.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, NamedFieldPuns, GeneralizedNewtypeDeriving #-} 2 | 3 | module Distribution.Server.Features.Search.ExtractDescriptionTerms ( 4 | extractSynopsisTerms, 5 | extractDescriptionTerms 6 | ) where 7 | 8 | import Data.Text (Text) 9 | import qualified Data.Text as T 10 | import Data.Set (Set) 11 | import qualified Data.Set as Set 12 | import Data.Char 13 | import qualified NLP.Tokenize as NLP 14 | import qualified NLP.Snowball as NLP 15 | import Control.Monad ((>=>)) 16 | import Data.Maybe 17 | 18 | import Distribution.Server.Pages.Package.HaddockHtml as Haddock (markup) 19 | import Distribution.Server.Pages.Package.HaddockTypes as Haddock 20 | import qualified Distribution.Server.Pages.Package.HaddockParse as Haddock (parseHaddockParagraphs) 21 | import qualified Distribution.Server.Pages.Package.HaddockLex as Haddock (tokenise) 22 | 23 | 24 | extractSynopsisTerms :: Set Text -> String -> [Text] 25 | extractSynopsisTerms stopWords = 26 | NLP.stems NLP.English 27 | . filter (`Set.notMember` stopWords) 28 | . map (T.toCaseFold . T.pack) 29 | . concatMap splitTok 30 | . filter (not . ignoreTok) 31 | . NLP.tokenize 32 | 33 | 34 | ignoreTok :: String -> Bool 35 | ignoreTok = all isPunctuation 36 | 37 | splitTok :: String -> [String] 38 | splitTok tok = 39 | case go tok of 40 | toks@(_:_:_) -> tok:toks 41 | toks -> toks 42 | where 43 | go remaining = 44 | case break (\c -> c == ')' || c == '-' || c == '/') remaining of 45 | ([], _:trailing) -> go trailing 46 | (leading, _:trailing) -> leading : go trailing 47 | ([], []) -> [] 48 | (leading, []) -> leading : [] 49 | 50 | 51 | extractDescriptionTerms :: Set Text -> String -> [Text] 52 | extractDescriptionTerms stopWords = 53 | NLP.stems NLP.English 54 | . filter (`Set.notMember` stopWords) 55 | . map (T.toCaseFold . T.pack) 56 | . maybe 57 | [] --TODO: something here 58 | ( filter (not . ignoreTok) 59 | . NLP.tokenize 60 | . concat . markup termsMarkup) 61 | . 
(Haddock.tokenise >=> Haddock.parseHaddockParagraphs) 62 | 63 | termsMarkup :: DocMarkup String [String] 64 | termsMarkup = Markup { 65 | markupEmpty = [], 66 | markupString = \s -> [s], 67 | markupParagraph = id, 68 | markupAppend = (++), 69 | markupIdentifier = \s -> [s], 70 | markupModule = const [], -- i.e. filter these out 71 | markupEmphasis = id, 72 | markupMonospaced = \s -> if length s > 1 then [] else s, 73 | markupUnorderedList = concat, 74 | markupOrderedList = concat, 75 | markupDefList = concatMap (\(d,t) -> d ++ t), 76 | markupCodeBlock = const [], 77 | markupHyperlink = \(Hyperlink _url mLabel) -> maybeToList mLabel, 78 | --TODO: extract main part of hostname 79 | markupPic = const [], 80 | markupAName = const [] 81 | } 82 | 83 | {- 84 | ------------------- 85 | -- Main experiment 86 | -- 87 | 88 | main = do 89 | pkgsFile <- readFile "pkgs" 90 | let mostFreq :: [String] 91 | pkgs :: [PackageDescription] 92 | (mostFreq, pkgs) = read pkgsFile 93 | 94 | stopWordsFile <- T.readFile "stopwords.txt" 95 | -- wordsFile <- T.readFile "/usr/share/dict/words" 96 | -- let ws = Set.fromList (map T.toLower $ T.lines wordsFile) 97 | 98 | 99 | print "reading file" 100 | evaluate (length mostFreq + length pkgs) 101 | print "done" 102 | 103 | let stopWords = Set.fromList $ T.lines stopWordsFile 104 | print stopWords 105 | 106 | sequence_ 107 | [ putStrLn $ display (packageName pkg) ++ ": " 108 | ++ --intercalate ", " 109 | (description pkg) ++ "\n" 110 | ++ intercalate ", " 111 | (map T.unpack $ extractDescriptionTerms stopWords (description pkg)) ++ "\n" 112 | | pkg <- pkgs 113 | , let pkgname = display (packageName pkg) ] 114 | -} 115 | -------------------------------------------------------------------------------- /Distribution/Server/Users/Group.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, ExistentialQuantification #-} 2 | module Distribution.Server.Users.Group ( 3 | UserList(..), 4 | UserGroup(..), 5 | GroupDescription(..), 6 | nullDescription, 7 | groupName, 8 | empty, 9 | add, 10 | remove, 11 | member, 12 | enumerate, 13 | fromList, 14 | unions, 15 | queryGroups 16 | ) where 17 | 18 | import Distribution.Server.Users.Types 19 | import Distribution.Server.Framework.MemSize 20 | 21 | import qualified Data.IntSet as IntSet 22 | import Data.Monoid (Monoid) 23 | import Data.SafeCopy (SafeCopy(..), contain) 24 | import qualified Data.Serialize as Serialize 25 | import Data.Typeable (Typeable) 26 | import Control.DeepSeq 27 | import Control.Applicative ((<$>)) 28 | 29 | import Prelude hiding (id) 30 | 31 | -- | Some subset of users, eg those allowed to perform some action. 
32 | -- 33 | newtype UserList = UserList IntSet.IntSet 34 | deriving (Eq, Monoid, Typeable, Show, MemSize) 35 | 36 | empty :: UserList 37 | empty = UserList IntSet.empty 38 | 39 | add :: UserId -> UserList -> UserList 40 | add (UserId id) (UserList group) = UserList (IntSet.insert id group) 41 | 42 | remove :: UserId -> UserList -> UserList 43 | remove (UserId id) (UserList group) = UserList (IntSet.delete id group) 44 | 45 | member :: UserId -> UserList -> Bool 46 | member (UserId id) (UserList group) = IntSet.member id group 47 | 48 | enumerate :: UserList -> [UserId] 49 | enumerate (UserList group) = map UserId (IntSet.toList group) 50 | 51 | fromList :: [UserId] -> UserList 52 | fromList ids = UserList $ IntSet.fromList (map (\(UserId uid) -> uid) ids) 53 | 54 | unions :: [UserList] -> UserList 55 | unions groups = UserList (IntSet.unions [ group | UserList group <- groups ]) 56 | 57 | -- | An abstraction over a UserList for dynamically querying and modifying 58 | -- a user group. 59 | -- 60 | -- This structure is not only meant for singleton user groups, but also collections 61 | -- of groups. Some features may provide a UserGroup parametrized by an argument. 62 | -- 63 | data UserGroup = UserGroup { 64 | -- a description of the group for display 65 | groupDesc :: GroupDescription, 66 | -- dynamic querying for its members 67 | queryUserList :: IO UserList, 68 | -- dynamically add a member (does nothing if already exists) 69 | -- creates the group if it didn't exist previously 70 | addUserList :: UserId -> IO (), 71 | -- dynamically remove a member (does nothing if not present) 72 | -- creates the group if it didn't exist previously 73 | removeUserList :: UserId -> IO (), 74 | -- user groups which can remove from one 75 | canRemoveGroup :: [UserGroup], 76 | -- user groups which can add to this one (use 'fix' to add to self) 77 | canAddGroup :: [UserGroup] 78 | } 79 | 80 | -- | A displayable description for a user group. 81 | -- 82 | -- Given a groupTitle of A and a group entity of Nothing, the group will be 83 | -- called "A"; given a groupTitle of "A" and a groupEntity of Just ("B", 84 | -- Just "C"), the title will be displayed as "A for B". 85 | data GroupDescription = GroupDescription { 86 | groupTitle :: String, 87 | groupEntity :: Maybe (String, Maybe String), 88 | groupPrologue :: String 89 | } 90 | 91 | nullDescription :: GroupDescription 92 | nullDescription = GroupDescription { groupTitle = "", groupEntity = Nothing, groupPrologue = "" } 93 | 94 | groupName :: GroupDescription -> String 95 | groupName desc = groupTitle desc ++ maybe "" (\(for, _) -> " for " ++ for) (groupEntity desc) 96 | 97 | queryGroups :: [UserGroup] -> IO UserList 98 | queryGroups = fmap unions . mapM queryUserList 99 | 100 | 101 | instance SafeCopy UserList where 102 | putCopy (UserList x) = contain $ Serialize.put x 103 | getCopy = contain $ UserList <$> Serialize.get 104 | 105 | -- for use in Caches, really... 
106 | instance NFData GroupDescription where 107 | rnf (GroupDescription a b c) = rnf a `seq` rnf b `seq` rnf c 108 | 109 | instance MemSize GroupDescription where 110 | memSize (GroupDescription a b c) = memSize3 a b c 111 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Package/HaddockParse.y: -------------------------------------------------------------------------------- 1 | { 2 | -- Disable warnings that the generated code causes 3 | {-# OPTIONS_GHC -fno-warn-deprecated-flags 4 | -fno-warn-missing-signatures 5 | -fno-warn-unused-binds 6 | -fno-warn-unused-matches 7 | -fno-warn-lazy-unlifted-bindings 8 | -fno-warn-name-shadowing 9 | -fno-warn-incomplete-patterns 10 | -fno-warn-tabs #-} 11 | module Distribution.Server.Pages.Package.HaddockParse (parseHaddockParagraphs) where 12 | 13 | import Distribution.Server.Pages.Package.HaddockLex 14 | import Distribution.Server.Pages.Package.HaddockHtml 15 | import Distribution.Server.Pages.Package.HaddockTypes 16 | import Data.Char (isSpace) 17 | } 18 | 19 | %expect 0 20 | 21 | %tokentype { Token } 22 | 23 | %token 24 | '@' { TokSpecial '@' } 25 | '[' { TokDefStart } 26 | ']' { TokDefEnd } 27 | DQUO { TokSpecial '\"' } 28 | URL { TokURL $$ } 29 | PIC { TokPic $$ } 30 | ANAME { TokAName $$ } 31 | '/../' { TokEmphasis $$ } 32 | '-' { TokBullet } 33 | '(n)' { TokNumber } 34 | '>..' { TokBirdTrack $$ } 35 | IDENT { TokIdent $$ } 36 | PARA { TokPara } 37 | STRING { TokString $$ } 38 | 39 | %monad { Maybe } 40 | 41 | %name parseHaddockParagraphs doc 42 | %name parseHaddockString seq 43 | 44 | %% 45 | 46 | doc :: { Doc RdrName } 47 | : apara PARA doc { docAppend $1 $3 } 48 | | PARA doc { $2 } 49 | | apara { $1 } 50 | | {- empty -} { DocEmpty } 51 | 52 | apara :: { Doc RdrName } 53 | : ulpara { DocUnorderedList [$1] } 54 | | olpara { DocOrderedList [$1] } 55 | | defpara { DocDefList [$1] } 56 | | para { $1 } 57 | 58 | ulpara :: { Doc RdrName } 59 | : '-' para { $2 } 60 | 61 | olpara :: { Doc RdrName } 62 | : '(n)' para { $2 } 63 | 64 | defpara :: { (Doc RdrName, Doc RdrName) } 65 | : '[' seq ']' seq { ($2, $4) } 66 | 67 | para :: { Doc RdrName } 68 | : seq { docParagraph $1 } 69 | | codepara { DocCodeBlock $1 } 70 | 71 | codepara :: { Doc RdrName } 72 | : '>..' codepara { docAppend (DocString $1) $2 } 73 | | '>..' { DocString $1 } 74 | 75 | seq :: { Doc RdrName } 76 | : elem seq { docAppend $1 $2 } 77 | | elem { $1 } 78 | 79 | elem :: { Doc RdrName } 80 | : elem1 { $1 } 81 | | '@' seq1 '@' { DocMonospaced $2 } 82 | 83 | seq1 :: { Doc RdrName } 84 | : PARA seq1 { docAppend (DocString "\n") $2 } 85 | | elem1 seq1 { docAppend $1 $2 } 86 | | elem1 { $1 } 87 | 88 | elem1 :: { Doc RdrName } 89 | : STRING { DocString $1 } 90 | | '/../' { DocEmphasis (DocString $1) } 91 | | URL { DocHyperlink (makeHyperlink $1) } 92 | | PIC { DocPic $1 } 93 | | ANAME { DocAName $1 } 94 | | IDENT { DocIdentifier $1 } 95 | | DQUO strings DQUO { DocModule $2 } 96 | 97 | strings :: { String } 98 | : STRING { $1 } 99 | | STRING strings { $1 ++ $2 } 100 | 101 | { 102 | happyError :: [Token] -> Maybe a 103 | happyError toks = Nothing 104 | 105 | -- | Create a `Hyperlink` from given string. 106 | -- 107 | -- A hyperlink consists of a URL and an optional label. The label is separated 108 | -- from the url by one or more whitespace characters. 
109 | makeHyperlink :: String -> Hyperlink 110 | makeHyperlink input = case break isSpace $ strip input of 111 | (url, "") -> Hyperlink url Nothing 112 | (url, label) -> Hyperlink url (Just . dropWhile isSpace $ label) 113 | 114 | -- | Remove all leading and trailing whitespace 115 | strip :: String -> String 116 | strip = dropWhile isSpace . reverse . dropWhile isSpace . reverse 117 | } 118 | -------------------------------------------------------------------------------- /tests/CreateUserTest.hs: -------------------------------------------------------------------------------- 1 | {- 2 | This a separate part of the high-level test of the hackage server 3 | (see HighLevelTest.hs). This set of tests check that the user self 4 | registration wors. This test needs local outgoing email, which isn't 5 | available on all hosts, so we keep it as a separate test. 6 | 7 | System requirements: 8 | 9 | 1. Port `testPort` (currently 8392) must be available on localhost 10 | 2. You must have sendmail configured so that it can send emails to external 11 | domains (for user registration) -- currently we use mailinator.com accounts 12 | 3. You must allow for outgoing HTTP traffic, as we POST to html5.validator.nu 13 | for HTML validation. 14 | -} 15 | 16 | module Main (main) where 17 | 18 | import Control.Exception 19 | import Control.Monad 20 | import Data.List (isInfixOf) 21 | import Data.String () 22 | import System.Directory 23 | import System.FilePath 24 | import System.IO 25 | import System.Random 26 | 27 | import MailUtils 28 | import Util 29 | import HttpUtils (Authorization(..)) 30 | import HackageClientUtils 31 | 32 | main :: IO () 33 | main = do hSetBuffering stdout LineBuffering 34 | info "Initialising" 35 | root <- getCurrentDirectory 36 | info "Setting up test directory" 37 | exists <- doesDirectoryExist (testDir root) 38 | when exists $ removeDirectoryRecursive (testDir root) 39 | createDirectory (testDir root) 40 | (setCurrentDirectory (testDir root) >> doit root) 41 | `finally` removeDirectoryRecursive (testDir root) 42 | 43 | testName :: FilePath 44 | testName = "CreateUserTestTemp" 45 | 46 | testDir :: FilePath -> FilePath 47 | testDir root = root "tests" testName 48 | 49 | doit :: FilePath -> IO () 50 | doit root 51 | = do info "initialising hackage database" 52 | runServerChecked root ["init"] 53 | withServerRunning root runUserTests 54 | 55 | runUserTests :: IO () 56 | runUserTests = do 57 | do info "Getting user list" 58 | xs <- getUsers 59 | unless (xs == ["admin"]) $ 60 | die ("Bad user list: " ++ show xs) 61 | do info "Getting admin user list" 62 | xs <- getAdmins 63 | unless (groupMembers xs == ["admin"]) $ 64 | die ("Bad admin user list: " ++ show xs) 65 | 66 | testEmail1 <- do 67 | -- Create random test email addresses so that we don't confuse 68 | -- confirmation emails from other sessions 69 | testEmail1 <- mkTestEmail `liftM` randomIO 70 | testEmail2 <- mkTestEmail `liftM` randomIO 71 | createUserSelfRegister "HackageTestUser1" "Test User 1" testEmail1 72 | createUserSelfRegister "HackageTestUser2" "Test User 2" testEmail2 73 | confirmUser testEmail1 "testpass1" 74 | confirmUser testEmail2 "testpass2" 75 | return (testEmailAddress testEmail1) 76 | do info "Checking new users are now in user list" 77 | xs <- getUsers 78 | unless (xs == ["admin","HackageTestUser1","HackageTestUser2"]) $ 79 | die ("Bad user list: " ++ show xs) 80 | do info "Checking new users are not in admin list" 81 | xs <- getAdmins 82 | unless (groupMembers xs == ["admin"]) $ 83 | die ("Bad admin user 
list: " ++ show xs) 84 | do info "Checking new users name & contact info" 85 | ncinf <- getNameContactInfo (Auth "HackageTestUser1" "testpass1") 86 | "/user/HackageTestUser1/name-contact.json" 87 | unless (realName ncinf == "Test User 1") $ 88 | die ("Bad user real name: " ++ realName ncinf) 89 | unless (contactEmailAddress ncinf == testEmail1) $ 90 | die ("Bad user email: " ++ contactEmailAddress ncinf) 91 | do info "Checking new users admin info" 92 | uainf <- getUserAdminInfo (Auth "admin" "admin") "/user/HackageTestUser1/admin-info.json" 93 | unless (accountKind uainf == Just "AccountKindRealUser") $ 94 | die ("Bad user account kind: " ++ show (accountKind uainf)) 95 | unless ("self-registration" `isInfixOf` accountNotes uainf) $ 96 | die ("Bad user notes: " ++ accountNotes uainf) 97 | 98 | where 99 | mkTestEmail :: Int -> String 100 | mkTestEmail n = "HackageTestUser" ++ show n 101 | 102 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Recent.hs: -------------------------------------------------------------------------------- 1 | -- Takes a reversed log file on the standard input and outputs web page. 2 | 3 | module Distribution.Server.Pages.Recent ( 4 | recentPage, 5 | recentFeed, 6 | ) where 7 | 8 | import Distribution.Server.Packages.Types 9 | import qualified Distribution.Server.Users.Users as Users 10 | import Distribution.Server.Users.Users (Users) 11 | import Distribution.Server.Pages.Template 12 | ( hackagePageWithHead ) 13 | 14 | import Distribution.Package 15 | ( PackageIdentifier, packageName, packageVersion, PackageName(..) ) 16 | import Distribution.PackageDescription 17 | ( GenericPackageDescription(packageDescription) 18 | , PackageDescription(synopsis) ) 19 | import Distribution.Text 20 | ( display ) 21 | 22 | import qualified Text.XHtml.Strict as XHtml 23 | import Text.XHtml 24 | ( Html, URL, (<<), (!) ) 25 | import qualified Text.RSS as RSS 26 | import Text.RSS 27 | ( RSS(RSS) ) 28 | import Network.URI 29 | ( URI(..), uriToString ) 30 | import Data.Time.Clock 31 | ( UTCTime ) 32 | import Data.Time.Format 33 | ( formatTime ) 34 | import System.Locale 35 | ( defaultTimeLocale ) 36 | 37 | 38 | -- | Takes a list of package info, in reverse order by timestamp. 39 | -- 40 | recentPage :: Users -> [PkgInfo] -> Html 41 | recentPage users pkgs = 42 | let log_rows = map (makeRow users) (take 20 pkgs) 43 | docBody = [XHtml.h2 << "Recent additions", 44 | XHtml.table ! [XHtml.align "center"] << log_rows] 45 | rss_link = XHtml.thelink ! [XHtml.rel "alternate", 46 | XHtml.thetype "application/rss+xml", 47 | XHtml.title "Hackage RSS Feed", 48 | XHtml.href rssFeedURL] << XHtml.noHtml 49 | in hackagePageWithHead [rss_link] "recent additions" docBody 50 | 51 | makeRow :: Users -> PkgInfo -> Html 52 | makeRow users PkgInfo { 53 | pkgInfoId = pkgid 54 | , pkgUploadData = (time, userId) 55 | } = 56 | XHtml.tr << 57 | [XHtml.td ! [XHtml.align "right"] << 58 | [XHtml.toHtml (showTime time), nbsp, nbsp], 59 | XHtml.td ! [XHtml.align "left"] << display user, 60 | XHtml.td ! [XHtml.align "left"] << 61 | [nbsp, nbsp, XHtml.anchor ! 62 | [XHtml.href (packageURL pkgid)] << display pkgid]] 63 | where nbsp = XHtml.primHtmlChar "nbsp" 64 | user = Users.userIdToName users userId 65 | 66 | showTime :: UTCTime -> String 67 | showTime = formatTime defaultTimeLocale "%c" 68 | 69 | -- | URL describing a package. 
70 | packageURL :: PackageIdentifier -> URL 71 | packageURL pkgid = "/package/" ++ display pkgid 72 | 73 | rssFeedURL :: URL 74 | rssFeedURL = "/recent.rss" 75 | 76 | recentAdditionsURL :: URL 77 | recentAdditionsURL = "/recent.html" 78 | 79 | recentFeed :: Users -> URI -> UTCTime -> [PkgInfo] -> RSS 80 | recentFeed users hostURI now pkgs = RSS 81 | "Recent additions" 82 | (hostURI { uriPath = recentAdditionsURL}) 83 | desc 84 | (channel now) 85 | [ releaseItem users hostURI pkg | pkg <- take 20 pkgs ] 86 | where 87 | desc = "The 20 most recent additions to Hackage, the Haskell package database." 88 | 89 | channel :: UTCTime -> [RSS.ChannelElem] 90 | channel now = 91 | [ RSS.Language "en" 92 | , RSS.ManagingEditor email 93 | , RSS.WebMaster email 94 | , RSS.ChannelPubDate now 95 | , RSS.LastBuildDate now 96 | , RSS.Generator "rss-feed" 97 | ] 98 | where 99 | email = "duncan@haskell.org (Duncan Coutts)" 100 | 101 | releaseItem :: Users -> URI -> PkgInfo -> [RSS.ItemElem] 102 | releaseItem users hostURI pkgInfo@(PkgInfo { 103 | pkgInfoId = pkgId 104 | , pkgUploadData = (time, userId) 105 | }) = 106 | [ RSS.Title title 107 | , RSS.Link uri 108 | , RSS.Guid True (uriToString id uri "") 109 | , RSS.PubDate time 110 | , RSS.Description desc 111 | ] 112 | where 113 | uri = hostURI { uriPath = packageURL pkgId } 114 | title = unPackageName (packageName pkgId) ++ " " ++ display (packageVersion pkgId) 115 | body = synopsis (packageDescription (pkgDesc pkgInfo)) 116 | desc = "Added by " ++ display user ++ ", " ++ showTime time ++ "." 117 | ++ if null body then "" else "
<br>
" ++ body 118 | user = Users.userIdToName users userId 119 | 120 | unPackageName :: PackageName -> String 121 | unPackageName (PackageName name) = name 122 | -------------------------------------------------------------------------------- /datafiles/templates/upload.html.st: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | $hackageCssTheme()$ 5 | Hackage: Uploading packages and package candidates 6 | 7 | 8 | 9 | $hackagePageHeader()$ 10 | 11 |

12 |

Uploading packages and package candidates

13 |

Uploading a package puts it in the main package index 14 | so that anyone can download it and view information about it. You can only 15 | upload a package version once, so try to get it right the first time! 16 |

17 | 18 |

You can also upload a package candidate 19 | to preview the package page, view any warnings or possible errors you might 20 | encounter, and let others install it before publishing it to the main index. 21 | (Note: you can view these warnings with 'cabal check'.) You can have multiple 22 | candidates for each package at the same time so long as they each have different 23 | versions. Finally, you can publish a candidate to the main index if it's not 24 | there already. 25 |

26 | 27 |

If you upload a package or package candidate and no other versions exist 28 | in the package database, you become part of the maintainer group for that 29 | package, and you can add other maintainers if you wish. If a maintainer group 30 | exists for a package, only its members can upload new versions of that package. 31 |

32 | 33 |

If there is no maintainer, the uploader can remove themselves from the group, 34 | and a package trustee can add anyone who wishes 35 | to assume the responsibility. The Maintainer field of the Cabal file should be 36 | None in this case. If a package is being maintained, any release not approved 37 | and supported by the maintainer should use a different package name. Then use 38 | the Maintainer field as above either to commit to supporting the fork 39 | yourself or to mark it as unsupported. 40 |

41 | 42 |

Note that all of the above is a makeshift upload policy based on the features 43 | available in the newer hackage-server. The Maintainer field has its uses, 44 | as do maintainer user groups. The libraries mailing list should probably 45 | determine the best approach for this. 46 |

47 | 48 |

Upload forms

49 |

Some last formalities: to upload a package, you'll need a Hackage 50 | username and password. (Alternatively, there's a 51 | command-line interface via cabal-install, which also needs the same username 52 | and password.) 53 |

54 | 55 |

Packages must be in the form produced by Cabal's 56 | sdist command: 57 | a gzipped tar file package-version.tar.gz 58 | comprising a directory package-version containing a package 59 | of that name and version, including package.cabal. 60 | See the notes at the bottom of the page. 61 |
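(Illustrative example, not part of the original template: a hypothetical package foo at version 1.0 would be uploaded as foo-1.0.tar.gz, unpacking to a single directory foo-1.0 that contains foo.cabal together with the rest of the source tree; running cabal sdist in the package's root directory produces a tarball in this form.)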

62 | 63 | 64 | 65 | 66 |

Notes

67 |
    68 |
  • You should check that your source bundle builds, 69 | including the haddock documentation if it's a library.
  • 70 |
  • Categories are determined by whatever you put in the Category field 71 | (there's no agreed list of category names yet). 72 | You can have more than one category, separated by commas. If no other versions of 73 | the package exist, the categories automatically become the package's tags.
  • 74 |
  • Documentation for library packages should be generated by a maintainer. 75 | The means of doing this is still up in the air.
  • 76 |
  • We have moved to Haddock 2, and expect some glitches. 77 | If you notice anything broken, please report it on the 78 | Haddock bug tracker.
  • 79 |
  • In GHC 6.8, several modules were split from the base package 80 | into other packages. 81 | See these notes on making packages work with a range of versions of GHC.
  • 82 |
  • While Haddock 2 83 | accepts GHC features, it is also more picky about comment syntax than 84 | the old version.
  • 85 |
86 | 87 |
88 | 89 | 90 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/BuildReports.hs: -------------------------------------------------------------------------------- 1 | -- Generate an HTML page listing all build reports for a package 2 | 3 | module Distribution.Server.Pages.BuildReports ( 4 | buildReportSummary, 5 | buildReportDetail, 6 | ) where 7 | 8 | import qualified Distribution.Server.Features.BuildReports.BuildReport as BuildReport 9 | import Distribution.Server.Features.BuildReports.BuildReport (BuildReport) 10 | import Distribution.Server.Features.BuildReports.BuildReports 11 | import Distribution.Server.Pages.Template ( hackagePage ) 12 | 13 | import Distribution.Package 14 | ( PackageIdentifier ) 15 | import Distribution.PackageDescription 16 | ( FlagName(FlagName) ) 17 | import Distribution.Text 18 | ( Text, display ) 19 | 20 | import qualified Text.XHtml.Strict as XHtml 21 | import Text.XHtml.Strict 22 | ( Html, (<<), (!), tr, th, td, p, h2, ulist, li 23 | , toHtml, table, theclass, concatHtml, isNoHtml ) 24 | import Data.List (intersperse) 25 | 26 | buildReportSummary :: PackageIdentifier 27 | -> [(BuildReportId, BuildReport)] -> XHtml.Html 28 | buildReportSummary pkgid reports = hackagePage title body 29 | where 30 | title = display pkgid ++ ": build reports" 31 | body = [h2 << title, summaryTable] 32 | 33 | summaryTable = XHtml.table ! [theclass "properties"] << 34 | (headerRow : dataRows) 35 | headerRow = tr << [ th ! [XHtml.theclass "horizontal"] << 36 | columnName 37 | | columnName <- columnNames ] 38 | columnNames = ["Platform", "Compiler", "Build outcome"] 39 | dataRows = 40 | [ tr ! [theclass (if odd n then "odd" else "even")] << 41 | [ td << (display (BuildReport.arch report) 42 | ++ " / " 43 | ++ display (BuildReport.os report)) 44 | , td << display (BuildReport.compiler report) 45 | , td << detailLink reportId << 46 | display (BuildReport.installOutcome report) ] 47 | | (n, (reportId, report)) <- zip [(1::Int)..] reports ] 48 | detailLink reportId = 49 | XHtml.anchor ! [XHtml.href $ "/buildreports/" ++ display reportId ] 50 | 51 | buildReportDetail :: BuildReport -> BuildReportId -> Maybe BuildLog -> XHtml.Html 52 | buildReportDetail report reportId buildLog = hackagePage title body 53 | where 54 | title = display pkgid ++ ": build report" 55 | pkgid = BuildReport.package report 56 | body = [h2 << title, details, buildLogPara] 57 | details = tabulate 58 | [ (name, value) 59 | | (name, field) <- showFields 60 | , let value = field report 61 | , not (isNoHtml value) ] 62 | 63 | buildLogPara = p << [ ulist << [li << buildLogLink]] 64 | buildLogLink = case buildLog of 65 | Nothing -> toHtml "No build log available" 66 | _ -> XHtml.anchor ! [XHtml.href buildLogURL ] << "Build log" 67 | buildLogURL = "/buildreports/" ++ display reportId ++ "/buildlog" 68 | 69 | showFields :: [(String, BuildReport -> Html)] 70 | showFields = 71 | [ ("Package", displayHtml . BuildReport.package) 72 | , ("Platform", toHtml . platform) 73 | , ("Compiler", displayHtml . BuildReport.compiler) 74 | , ("Build client", displayHtml . BuildReport.client) 75 | , ("Configuration flags", displayHtmlFlags . BuildReport.flagAssignment) 76 | , ("Exact dependencies", displayHtmlList . BuildReport.dependencies) 77 | , ("Install outcome", displayHtml . BuildReport.installOutcome) 78 | , ("Docs outcome", displayHtml . 
BuildReport.docsOutcome) 79 | ] 80 | platform report' = display (BuildReport.arch report') 81 | ++ " / " 82 | ++ display (BuildReport.os report') 83 | displayHtml :: Text a => a -> Html 84 | displayHtml = toHtml . display 85 | displayHtmlList :: Text a => [a] -> Html 86 | displayHtmlList = concatHtml . intersperse (toHtml ", ") . map displayHtml 87 | displayHtmlFlags = concatHtml . intersperse (toHtml ", ") . map displayFlag 88 | displayFlag (FlagName fname, False) = toHtml $ '-':fname 89 | displayFlag (FlagName fname, True) = toHtml $ fname 90 | 91 | tabulate :: [(String, Html)] -> Html 92 | tabulate items = table ! [theclass "properties"] << 93 | [tr ! [theclass (if odd n then "odd" else "even")] << 94 | [th ! [theclass "horizontal"] << t, td << d] | 95 | (n, (t, d)) <- zip [(1::Int)..] items] 96 | 97 | -------------------------------------------------------------------------------- /Distribution/Client/ParseApacheLogs.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -fno-warn-unused-do-bind #-} 2 | -- Extract download counts from Apache log files 3 | module Distribution.Client.ParseApacheLogs 4 | ( logToDownloadCounts 5 | ) where 6 | 7 | -- TODO: We assume the Apache log files are ASCII, not Unicode. 8 | 9 | import Distribution.Package (PackageName) 10 | import Distribution.Version (Version) 11 | import Distribution.Text (display, simpleParse) 12 | 13 | import Control.Monad ((>=>)) 14 | import System.Locale (defaultTimeLocale) 15 | import Data.List (intercalate) 16 | import Data.Maybe (catMaybes) 17 | import Data.Attoparsec.Char8 (Parser) 18 | import Data.Map (Map) 19 | import Data.Time.Calendar (Day) 20 | import Data.Time.Format (parseTime) 21 | import qualified Data.ByteString.Char8 as SBS 22 | import qualified Data.Attoparsec.Char8 as Att 23 | import qualified Data.ByteString.Lazy.Char8 as LBS 24 | import qualified Data.Map as Map 25 | 26 | logToDownloadCounts :: LBS.ByteString -> LBS.ByteString 27 | logToDownloadCounts = 28 | LBS.unlines 29 | . map formatOutput 30 | . Map.toList 31 | . accumHist 32 | . catMaybes 33 | . map ((packageGET >=> parseGET) . parseLine . SBS.concat . LBS.toChunks) 34 | . LBS.lines 35 | 36 | data LogLine = LogLine { 37 | _getIP :: !SBS.ByteString 38 | , _getIdent :: !SBS.ByteString 39 | , _getUser :: !SBS.ByteString 40 | , getDate :: !SBS.ByteString 41 | , getReq :: !SBS.ByteString 42 | , _getStatus :: !SBS.ByteString 43 | , _getBytes :: !SBS.ByteString 44 | , _getRef :: !SBS.ByteString 45 | , _getUA :: !SBS.ByteString 46 | } deriving (Ord, Show, Eq) 47 | 48 | plainValue :: Parser SBS.ByteString 49 | plainValue = Att.takeWhile1 (\c -> c /= ' ' && c /= '\n') --many1' (noneOf " \n") 50 | 51 | bracketedValue :: Parser SBS.ByteString 52 | bracketedValue = do 53 | Att.char '[' 54 | content <- Att.takeWhile1 (\c -> c /= ']') --many' (noneOf "]") 55 | Att.char ']' 56 | return content 57 | 58 | quotedValue :: Parser SBS.ByteString 59 | quotedValue = do 60 | Att.char '"' 61 | content <- Att.takeWhile1 (\c -> c /= '"') --many' (noneOf "\"") 62 | Att.char '"' 63 | return content 64 | 65 | logLine :: Parser LogLine 66 | logLine = do 67 | ip <- plainValue ; Att.skipSpace 68 | ident <- plainValue ; Att.skipSpace 69 | usr <- plainValue ; Att.skipSpace 70 | date <- bracketedValue ; Att.skipSpace 71 | req <- quotedValue ; Att.skipSpace 72 | status <- plainValue ; Att.skipSpace 73 | bytes <- plainValue ; Att.skipSpace 74 | ref <- quotedValue ; Att.skipSpace 75 | ua <- quotedValue 76 | return $! 
LogLine ip ident usr date req status bytes ref ua 77 | 78 | parseLine :: SBS.ByteString -> Either SBS.ByteString LogLine 79 | parseLine line = case Att.parseOnly logLine line of 80 | Left _ -> Left line 81 | Right res -> Right res 82 | 83 | packageGET :: Either a LogLine -> Maybe (SBS.ByteString, SBS.ByteString, SBS.ByteString) 84 | packageGET (Right logline) 85 | | [method, path, _] <- SBS.words (getReq logline) 86 | , method == methodGET 87 | , [root, dir1, dir2, name, ver, tarball] <- SBS.split '/' path 88 | , SBS.null root, dir1 == packagesDir, dir2 == archiveDir 89 | , SBS.isSuffixOf targzExt tarball 90 | = Just (name, ver, getDate logline) 91 | packageGET _ = Nothing 92 | 93 | parseGET :: (SBS.ByteString, SBS.ByteString, SBS.ByteString) -> Maybe (PackageName, Version, Day) 94 | parseGET (pkgNameStr, pkgVersionStr, dayStr) = do 95 | name <- simpleParse . SBS.unpack $ pkgNameStr 96 | version <- simpleParse . SBS.unpack $ pkgVersionStr 97 | day <- parseTime defaultTimeLocale "%d/%b/%Y:%T %z" . SBS.unpack $ dayStr 98 | return (name, version, day) 99 | 100 | methodGET, packagesDir, archiveDir, targzExt :: SBS.ByteString 101 | methodGET = SBS.pack "GET" 102 | packagesDir = SBS.pack "packages" 103 | archiveDir = SBS.pack "archive" 104 | targzExt = SBS.pack ".tar.gz" 105 | 106 | accumHist :: Ord k => [k] -> Map k Int 107 | accumHist es = Map.fromListWith (+) [ (pkgId,1) | pkgId <- es ] 108 | 109 | formatOutput :: ((PackageName, Version, Day), Int) -> LBS.ByteString 110 | formatOutput ((name, version, day), numDownloads) = 111 | LBS.pack $ intercalate "," $ map show [ display name 112 | , show day 113 | , display version 114 | , show numDownloads 115 | ] 116 | -------------------------------------------------------------------------------- /Distribution/Server/Features/PackageCandidates/Backup.hs: -------------------------------------------------------------------------------- 1 | module Distribution.Server.Features.PackageCandidates.Backup where 2 | 3 | import Distribution.Server.Framework.BackupRestore 4 | import Distribution.Server.Framework.BackupDump 5 | import Distribution.Server.Features.PackageCandidates.State 6 | import Distribution.Server.Features.PackageCandidates.Types 7 | import Distribution.Server.Features.Core.Backup as CoreBackup 8 | import qualified Distribution.Server.Packages.PackageIndex as PackageIndex 9 | import Distribution.Package (PackageId, packageId) 10 | import Text.CSV (CSV) 11 | import Data.Version (Version(Version), showVersion) 12 | import Data.Map (Map) 13 | import qualified Data.Map as Map 14 | 15 | {------------------------------------------------------------------------------- 16 | Restore backup 17 | -------------------------------------------------------------------------------} 18 | 19 | data PartialCandidate = PartialCandidate { 20 | partialWarnings :: [String] 21 | , partialPublic :: Bool 22 | } 23 | 24 | type PartialCandidates = Map PackageId PartialCandidate 25 | 26 | restoreCandidates :: RestoreBackup CandidatePackages 27 | restoreCandidates = updateCandidates Map.empty Map.empty 28 | 29 | -- We keep the partial packages separate from the rest of the candidate info 30 | -- so that we can reuse more of CoreBackup 31 | updateCandidates :: PartialIndex -> PartialCandidates -> RestoreBackup CandidatePackages 32 | updateCandidates packageMap candidateMap = RestoreBackup { 33 | restoreEntry = \entry -> do 34 | packageMap' <- doPackageImport packageMap entry 35 | candidateMap' <- doCandidateImport candidateMap entry 36 | return (updateCandidates 
packageMap' candidateMap') 37 | , restoreFinalize = do 38 | let combined = combineMaps packageMap candidateMap 39 | results <- mapM mkCandidate (Map.toList combined) 40 | return $ CandidatePackages (PackageIndex.fromList results) 41 | } 42 | where 43 | mkCandidate :: (PackageId, (Maybe PartialPkg, Maybe PartialCandidate)) -> Restore CandPkgInfo 44 | mkCandidate (pkgId, (Just partialPkg, Just partialCand)) = do 45 | pkg <- CoreBackup.partialToFullPkg (pkgId, partialPkg) 46 | return CandPkgInfo { candPkgInfo = pkg 47 | , candWarnings = partialWarnings partialCand 48 | , candPublic = partialPublic partialCand 49 | } 50 | mkCandidate (pkgId, (Nothing, Just _)) = 51 | fail $ show pkgId ++ ": candidate.csv without corresponding package" 52 | mkCandidate (pkgId, (Just _, Nothing)) = 53 | fail $ show pkgId ++ ": missing candidate.csv" 54 | mkCandidate _ = 55 | fail "mkCandidate: the impossible happened" 56 | 57 | combineMaps :: Ord k => Map k a -> Map k b -> Map k (Maybe a, Maybe b) 58 | combineMaps map1 map2 = 59 | let map1' = Map.map (\x -> (Just x, Nothing)) map1 60 | map2' = Map.map (\y -> (Nothing, Just y)) map2 61 | in Map.unionWith (\e1 e2 -> (fst e1, snd e2)) map1' map2' 62 | 63 | doCandidateImport :: PartialCandidates -> BackupEntry -> Restore PartialCandidates 64 | doCandidateImport candidates (BackupByteString ["package", pkgStr, "candidate.csv"] bs) = do 65 | pkgId <- CoreBackup.parsePackageId pkgStr 66 | csv <- importCSV "candidate.csv" bs 67 | partial <- case csv of 68 | [_version, ["public", public], "warnings" : warnings] -> 69 | return PartialCandidate { partialWarnings = warnings 70 | , partialPublic = read public 71 | } 72 | _ -> 73 | fail "candidate.csv has an invalid format" 74 | return (Map.insert pkgId partial candidates) 75 | doCandidateImport candidates _ = 76 | return candidates 77 | 78 | {------------------------------------------------------------------------------- 79 | Create backup 80 | -------------------------------------------------------------------------------} 81 | 82 | backupCandidates :: CandidatePackages -> [BackupEntry] 83 | backupCandidates st = concatMap backupCandidate candidates 84 | where 85 | candidates :: [CandPkgInfo] 86 | candidates = PackageIndex.allPackages (candidateList st) 87 | 88 | backupCandidate :: CandPkgInfo -> [BackupEntry] 89 | backupCandidate candidate = 90 | csvToBackup (CoreBackup.pkgPath (packageId candidate) "candidate.csv") 91 | (backupExtraInfo candidate) 92 | : CoreBackup.infoToAllEntries (candPkgInfo candidate) 93 | 94 | -- | Backup the information CandPkgInfo adds on top of PkgInfo 95 | backupExtraInfo :: CandPkgInfo -> CSV 96 | backupExtraInfo candidate = [ 97 | [showVersion versionCSV] 98 | , ["public", show (candPublic candidate)] 99 | , "warnings" : candWarnings candidate 100 | ] 101 | where 102 | versionCSV = Version [0,1] ["unstable"] 103 | -------------------------------------------------------------------------------- /Distribution/Server/Util/NameIndex.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable, TemplateHaskell, 2 | FlexibleInstances, FlexibleContexts, MultiParamTypeClasses #-} 3 | -- TypeOperators, TypeSynonymInstances, TypeFamilies 4 | 5 | module Distribution.Server.Util.NameIndex where 6 | 7 | import Data.Map (Map) 8 | import Data.Typeable (Typeable) 9 | import qualified Data.Map as Map 10 | import Data.Set (Set) 11 | import qualified Data.Set as Set 12 | import Data.Char (toLower) 13 | import Data.List (unfoldr, foldl') 14 | import 
Data.Maybe (maybeToList) 15 | import Control.DeepSeq 16 | import Data.SafeCopy 17 | 18 | import Distribution.Server.Framework.MemSize 19 | 20 | -- | Case-insensitive name search. This is meant to be an enhanced set of 21 | -- names, not a full text search. It's also meant to be a sort of a short-term 22 | -- solution for name suggestion searches; e.g., package searches should also 23 | -- consider the tagline of a package. 24 | data NameIndex = NameIndex { 25 | -- | This is the mapping from case-insensitive search term -> name. 26 | nameIndex :: Map String (Set String), 27 | -- | This is the set of names. 28 | storedNamesIndex :: Set String, 29 | -- | This is the specification of the type of generator, mainly here because 30 | -- functions can't be serialized. Just str means to break on any char in 31 | -- str (breakGenerator); Nothing is defaultGenerator. 32 | nameGenType :: Maybe [Char], 33 | -- | This is the generator of search terms from names. 34 | nameSearchGenerator :: String -> [String] 35 | } deriving (Typeable) 36 | 37 | emptyNameIndex :: Maybe [Char] -> NameIndex 38 | emptyNameIndex gen = NameIndex Map.empty Set.empty gen $ case gen of 39 | Nothing -> defaultGenerator 40 | Just st -> breakGenerator st 41 | 42 | defaultGenerator :: String -> [String] 43 | defaultGenerator name = [name] 44 | 45 | breakGenerator :: [Char] -> String -> [String] 46 | breakGenerator breakStr name = name:unfoldr unfoldName name 47 | where unfoldName str = case break (`elem` breakStr) str of 48 | ([], _) -> Nothing 49 | (_, []) -> Nothing 50 | (_, _:str') -> Just (str', str') 51 | 52 | constructIndex :: [String] -> Maybe [Char] -> NameIndex 53 | constructIndex strs gen = foldl' (flip addName) (emptyNameIndex gen) strs 54 | 55 | addName :: String -> NameIndex -> NameIndex 56 | addName caseName (NameIndex index stored gen' gen) = 57 | let name = map toLower caseName 58 | nameSet = Set.singleton caseName 59 | forName = Map.fromList $ map (\term -> (term, nameSet)) (gen name) 60 | in NameIndex (Map.unionWith Set.union index forName) 61 | (Set.insert caseName stored) gen' gen 62 | 63 | deleteName :: String -> NameIndex -> NameIndex 64 | deleteName caseName (NameIndex index stored gen' gen) = 65 | let name = map toLower caseName 66 | nameSet = Set.singleton caseName 67 | forName = Map.fromList $ map (\term -> (term, nameSet)) (gen name) 68 | in NameIndex (Map.differenceWith (\a b -> keepSet $ Set.difference a b) index forName) 69 | (Set.delete caseName stored) gen' gen 70 | where keepSet s = if Set.null s then Nothing else Just s 71 | 72 | lookupName :: String -> NameIndex -> Set String 73 | lookupName caseName (NameIndex index _ _ _) = 74 | Map.findWithDefault Set.empty (map toLower caseName) index 75 | 76 | lookupPrefix :: String -> NameIndex -> Set String 77 | lookupPrefix caseName (NameIndex index _ _ _) = 78 | let name = map toLower caseName 79 | (_, mentry, startTree) = Map.splitLookup name index 80 | -- the idea is, select all names in the range [name, mapLast succ name) 81 | -- an alternate idea would just be to takeWhile (`isPrefixOf` name) 82 | (totalTree, _, _) = Map.splitLookup (mapLast succ name) startTree 83 | nameSets = maybeToList mentry ++ Map.elems totalTree 84 | in Set.unions nameSets 85 | 86 | takeSetPrefix :: String -> Set String -> Set String 87 | takeSetPrefix name strs = 88 | let (_, present, startSet) = Set.splitMember name strs 89 | (totalSet, _, _) = Set.splitMember (mapLast succ name) startSet 90 | in (if present then Set.insert name else id) totalSet 91 | 92 | -- | Map only the 
last element of a list 93 | mapLast :: (a -> a) -> [a] -> [a] 94 | mapLast f (x:[]) = f x:[] 95 | mapLast f (x:xs) = x:mapLast f xs 96 | mapLast _ [] = [] 97 | 98 | -- store arguments which can be sent to constructIndex :: [String] -> Maybe [Char] -> NameIndex 99 | instance SafeCopy NameIndex where 100 | putCopy index = contain $ safePut (nameGenType index) >> safePut (storedNamesIndex index) 101 | getCopy = contain $ do 102 | gen <- safeGet 103 | index <- safeGet 104 | return $ constructIndex (Set.toList index) gen 105 | 106 | instance NFData NameIndex where 107 | rnf (NameIndex a b _ _) = rnf a `seq` rnf b 108 | 109 | instance MemSize NameIndex where 110 | memSize (NameIndex a b c d) = memSize4 a b c d 111 | -------------------------------------------------------------------------------- /Distribution/Server/Framework/Error.hs: -------------------------------------------------------------------------------- 1 | -- | Error handling style for the hackage server. 2 | -- 3 | -- The point is to be able to abort and return appropriate HTTP errors, plus 4 | -- human readable messages. 5 | -- 6 | -- We use a standard error monad / exception style so that we can hide some of 7 | -- the error checking plumbing. 8 | -- 9 | -- We use a custom error type that enables us to render the error in an 10 | -- appropriate way, ie themed html or plain text, depending on the context. 11 | -- 12 | module Distribution.Server.Framework.Error ( 13 | 14 | -- * Server error monad 15 | ServerPartE, 16 | 17 | -- * Generating errors 18 | MessageSpan(..), 19 | errBadRequest, 20 | errForbidden, 21 | errNotFound, 22 | errBadMediaType, 23 | errInternalError, 24 | throwError, 25 | 26 | -- * Handling errors 27 | ErrorResponse(..), 28 | runServerPartE, 29 | handleErrorResponse, 30 | messageToText, 31 | 32 | -- * Handy error message operator 33 | (?!) 34 | ) where 35 | 36 | import Happstack.Server 37 | import Control.Monad.Error 38 | 39 | import qualified Data.Text.Lazy as Text 40 | import qualified Data.Text.Lazy.Encoding as Text 41 | 42 | -- | The \"oh noes?!\" operator 43 | -- 44 | (?!) :: Maybe a -> e -> Either e a 45 | ma ?! e = maybe (Left e) Right ma 46 | 47 | 48 | -- | A derivative of the 'ServerPartT' monad with an extra error monad layer. 49 | -- 50 | -- So we can use the standard 'MonadError' methods like 'throwError'. 51 | -- 52 | type ServerPartE a = ServerPartT (ErrorT ErrorResponse IO) a 53 | 54 | -- | A type for generic error reporting that should be sufficient for 55 | -- most purposes. 56 | -- 57 | data ErrorResponse = ErrorResponse { 58 | errorCode :: Int, 59 | errorHeaders:: [(String, String)], 60 | errorTitle :: String, 61 | errorDetail :: [MessageSpan] 62 | } deriving (Eq, Show) 63 | 64 | instance ToMessage ErrorResponse where 65 | toResponse (ErrorResponse code hdrs title detail) = 66 | let rspbody = title ++ ": " ++ messageToText detail ++ "\n" 67 | in Response { 68 | rsCode = code, 69 | rsHeaders = mkHeaders (("Content-Type", "text/plain") : reverse hdrs), 70 | rsFlags = nullRsFlags { rsfLength = ContentLength }, 71 | rsBody = Text.encodeUtf8 (Text.pack rspbody), 72 | rsValidator = Nothing 73 | } 74 | 75 | -- | A message possibly including hypertext links. 76 | -- 77 | -- The point is to be able to render error messages either as text or as html. 78 | -- 79 | data MessageSpan = MLink String String | MText String 80 | deriving (Eq, Show) 81 | 82 | -- | Format a message as simple text. 83 | -- 84 | -- For html or other formats you'll have to write your own function! 
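-- (Added illustrative note, not in the original module: given the clauses
-- below, a message such as
--     [MText "Package ", MLink "foo" "/package/foo", MText " not found"]
-- renders as the plain string "Package foo not found"; a link contributes
-- only its anchor text in this textual rendering.)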
85 | -- 86 | messageToText :: [MessageSpan] -> String 87 | messageToText [] = "" 88 | messageToText (MLink x _:xs) = x ++ messageToText xs 89 | messageToText (MText x :xs) = x ++ messageToText xs 90 | 91 | -- We don't want to use these methods directly anyway. 92 | instance Error ErrorResponse where 93 | noMsg = ErrorResponse 500 [] "Internal server error" [] 94 | strMsg str = ErrorResponse 500 [] "Internal server error" [MText str] 95 | 96 | 97 | errBadRequest :: String -> [MessageSpan] -> ServerPartE a 98 | errBadRequest title message = throwError (ErrorResponse 400 [] title message) 99 | 100 | -- note: errUnauthorized is deliberately not provided because exceptions thrown 101 | -- in this way bypass the FilterMonad stuff and so setHeaderM etc are ignored 102 | -- but setHeaderM are usually needed for responding to auth errors. 103 | 104 | errForbidden :: String -> [MessageSpan] -> ServerPartE a 105 | errForbidden title message = throwError (ErrorResponse 403 [] title message) 106 | 107 | errNotFound :: String -> [MessageSpan] -> ServerPartE a 108 | errNotFound title message = throwError (ErrorResponse 404 [] title message) 109 | 110 | errBadMediaType :: String -> [MessageSpan] -> ServerPartE a 111 | errBadMediaType title message = throwError (ErrorResponse 415 [] title message) 112 | 113 | errInternalError :: [MessageSpan] -> ServerPartE a 114 | errInternalError message = throwError (ErrorResponse 500 [] title message) 115 | where 116 | title = "Internal server error" 117 | 118 | -- | Run a 'ServerPartE', including a top-level fallback error handler. 119 | -- 120 | -- Any 'ErrorResponse' exceptions are turned into a simple error response with 121 | -- a \"text/plain\" formated body. 122 | -- 123 | -- To use a nicer custom formatted error response, use 'handleErrorResponse'. 124 | -- 125 | runServerPartE :: ServerPartE a -> ServerPart a 126 | runServerPartE = mapServerPartT' (spUnwrapErrorT fallbackHandler) 127 | where 128 | fallbackHandler :: ErrorResponse -> ServerPart a 129 | fallbackHandler err = finishWith (toResponse err) 130 | 131 | handleErrorResponse :: (ErrorResponse -> ServerPartE Response) 132 | -> ServerPartE a -> ServerPartE a 133 | handleErrorResponse handler action = 134 | catchError action (\errResp -> handler errResp >>= finishWith) 135 | -------------------------------------------------------------------------------- /Distribution/Server/Util/CountingMap.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, UndecidableInstances, DeriveDataTypeable, ScopedTypeVariables #-} 2 | module Distribution.Server.Util.CountingMap ( 3 | NestedCountingMap(..) 4 | , SimpleCountingMap(..) 5 | , CountingMap(..) 
6 | , cmFromCSV 7 | ) where 8 | 9 | import Prelude hiding (rem) 10 | 11 | import Data.Map (Map) 12 | import qualified Data.Map as Map 13 | import Data.Maybe (fromMaybe) 14 | import Data.Typeable (Typeable) 15 | import Text.CSV (CSV, Record) 16 | import Control.Applicative ((<$>), (<*>)) 17 | 18 | import Data.SafeCopy (SafeCopy(..), safeGet, safePut, contain) 19 | 20 | import Distribution.Text (Text(..), display) 21 | 22 | import Distribution.Server.Framework.Instances () 23 | import Distribution.Server.Framework.MemSize 24 | import Distribution.Server.Framework.BackupRestore (parseRead, parseText) 25 | 26 | {------------------------------------------------------------------------------ 27 | We define some generic machinery to give us functions for arbitrarily 28 | nested "counting maps". 29 | 30 | We define SimpleCountingMap as a separate type from NestedCountingMap as 31 | a hint to the type checker (we get into trouble with the functional 32 | dependencies otherwise). 33 | ------------------------------------------------------------------------------} 34 | 35 | data NestedCountingMap a b = NCM { 36 | nestedTotalCount :: Int 37 | , nestedCountingMap :: Map a b 38 | } 39 | deriving (Show, Eq, Typeable) 40 | 41 | newtype SimpleCountingMap a = SCM { 42 | simpleCountingMap :: NestedCountingMap a Int 43 | } 44 | deriving (Show, Eq, Typeable) 45 | 46 | class CountingMap k a | a -> k where 47 | cmEmpty :: a 48 | cmTotal :: a -> Int 49 | cmInsert :: k -> Int -> a -> a 50 | cmFind :: k -> a -> Int 51 | cmToList :: a -> [(k, Int)] 52 | 53 | cmToCSV :: a -> CSV 54 | cmInsertRecord :: Monad m => Record -> a -> m (a, Int) 55 | 56 | instance (Ord k, Text k) => CountingMap k (SimpleCountingMap k) where 57 | cmEmpty = SCM (NCM 0 Map.empty) 58 | 59 | cmTotal (SCM (NCM total _)) = total 60 | 61 | cmInsert k n (SCM (NCM total m)) = 62 | SCM (NCM (total + n) (adjustFrom (+ n) k 0 m)) 63 | 64 | cmFind k (SCM (NCM _ m)) = Map.findWithDefault 0 k m 65 | 66 | cmToList (SCM (NCM _ m)) = Map.toList m 67 | 68 | cmToCSV (SCM (NCM _ m)) = map aux (Map.toList m) 69 | where 70 | aux :: (k, Int) -> Record 71 | aux (k, n) = [display k, show n] 72 | 73 | cmInsertRecord [k, n] m = do 74 | key <- parseText "key" k 75 | count <- parseRead "count" n 76 | return (cmInsert key count m, count) 77 | cmInsertRecord _ _ = 78 | fail "cmInsertRecord: Invalid record" 79 | 80 | instance (Text k, Ord k, Eq l, CountingMap l a) => CountingMap (k, l) (NestedCountingMap k a) where 81 | cmEmpty = NCM 0 Map.empty 82 | 83 | cmTotal (NCM total _m) = total 84 | 85 | cmInsert (k, l) n (NCM total m) = 86 | NCM (total + n) (adjustFrom (cmInsert l n) k cmEmpty m) 87 | 88 | cmFind (k, l) (NCM _ m) = cmFind l (Map.findWithDefault cmEmpty k m) 89 | 90 | cmToList (NCM _ m) = concatMap aux (Map.toList m) 91 | where 92 | aux :: (k, a) -> [((k, l), Int)] 93 | aux (k, m') = map (\(l, c) -> ((k, l), c)) (cmToList m') 94 | 95 | cmToCSV (NCM _ m) = concatMap aux (Map.toList m) 96 | where 97 | aux :: (k, a) -> CSV 98 | aux (k, m') = map (display k:) (cmToCSV m') 99 | 100 | cmInsertRecord (k : record) (NCM total m) = do 101 | key <- parseText "key" k 102 | let submap = Map.findWithDefault cmEmpty key m 103 | (submap', added) <- cmInsertRecord record submap 104 | return (NCM (total + added) (Map.insert key submap' m), added) 105 | cmInsertRecord [] _ = 106 | fail "cmInsertRecord: Invalid record" 107 | 108 | cmFromCSV :: (Monad m, CountingMap k a) => CSV -> m a 109 | cmFromCSV = go cmEmpty 110 | where 111 | go acc [] = return acc 112 | go acc (r:rs) = do 113 | 
(acc', _) <- cmInsertRecord r acc 114 | go acc' rs 115 | 116 | {------------------------------------------------------------------------------ 117 | Auxiliary 118 | ------------------------------------------------------------------------------} 119 | 120 | adjustFrom :: Ord k => (a -> a) -> k -> a -> Map k a -> Map k a 121 | adjustFrom func key value = Map.alter (Just . func . fromMaybe value) key 122 | 123 | {------------------------------------------------------------------------------ 124 | Type classes instances 125 | ------------------------------------------------------------------------------} 126 | 127 | instance MemSize a => MemSize (SimpleCountingMap a) where 128 | memSize (SCM m) = memSize m 129 | 130 | instance (MemSize a, MemSize b) => MemSize (NestedCountingMap a b) where 131 | memSize (NCM a b) = memSize2 a b 132 | 133 | instance (Ord a, SafeCopy a, SafeCopy b) => SafeCopy (NestedCountingMap a b) where 134 | putCopy (NCM total m) = contain $ do safePut total ; safePut m 135 | getCopy = contain $ NCM <$> safeGet <*> safeGet 136 | 137 | instance (Ord a, SafeCopy a) => SafeCopy (SimpleCountingMap a) where 138 | putCopy (SCM m) = contain $ safePut m 139 | getCopy = contain $ SCM <$> safeGet 140 | -------------------------------------------------------------------------------- /Distribution/Server/Features/Search/ExtractNameTerms.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns, NamedFieldPuns, GeneralizedNewtypeDeriving #-} 2 | 3 | module Distribution.Server.Features.Search.ExtractNameTerms ( 4 | extractPackageNameTerms, 5 | extractModuleNameTerms, 6 | ) where 7 | 8 | import Data.Text (Text) 9 | import qualified Data.Text as T 10 | import Data.Char (isUpper, isDigit) 11 | import Data.List 12 | import Data.List.Split hiding (Splitter) 13 | import Data.Maybe (maybeToList) 14 | 15 | import Data.Functor.Identity 16 | import Control.Monad 17 | import Control.Monad.List 18 | import Control.Monad.Writer 19 | import Control.Monad.State 20 | 21 | 22 | extractModuleNameTerms :: String -> [Text] 23 | extractModuleNameTerms modname = 24 | map T.toCaseFold $ 25 | nub $ 26 | map T.pack $ 27 | flip runSplitter modname $ do 28 | _ <- forEachPart splitDot 29 | _ <- forEachPart splitCamlCase 30 | satisfy (not . singleChar) 31 | get >>= emit 32 | 33 | extractPackageNameTerms :: String -> [Text] 34 | extractPackageNameTerms pkgname = 35 | map T.toCaseFold $ 36 | nub $ 37 | map T.pack $ 38 | flip runSplitter pkgname $ do 39 | 40 | fstComponentHyphen <- forEachPart splitHyphen 41 | 42 | satisfy (`notElem` ["hs", "haskell"]) 43 | 44 | _ <- forEachPart stripPrefixH 45 | 46 | fstComponentCaml <- forEachPart splitCamlCase 47 | 48 | fstComponent2 <- forEachPart splitOn2 49 | 50 | when (fstComponentHyphen && fstComponentCaml && fstComponent2) $ do 51 | forEachPartAndWhole stripPrefix_h 52 | _ <- forEachPart (maybeToList . stripPrefix "lib") 53 | _ <- forEachPart (maybeToList . stripSuffix "lib") 54 | _ <- forEachPart stripSuffixNum 55 | satisfy (not . singleChar) 56 | 57 | get >>= emit 58 | 59 | newtype Split a = Split (StateT String (ListT (WriterT [String] Identity)) a) 60 | deriving (Monad, MonadPlus, MonadState String) 61 | 62 | emit :: String -> Split () 63 | emit x = Split (lift (lift (tell [x]))) 64 | 65 | forEach :: [a] -> Split a 66 | forEach = msum . 
map return 67 | 68 | runSplitter :: Split () -> String -> [String] 69 | runSplitter (Split m) s = snd (runWriter (runListT (runStateT m s))) 70 | 71 | singleChar :: String -> Bool 72 | singleChar [_] = True 73 | singleChar _ = False 74 | 75 | satisfy :: (String -> Bool) -> Split () 76 | satisfy p = get >>= guard . p 77 | 78 | forEachPart :: (String -> [String]) -> Split Bool 79 | forEachPart parts = do 80 | t <- get 81 | case parts t of 82 | [] -> return True 83 | [t'] | t == t' -> return True 84 | ts -> do emit t 85 | (t', n) <- forEach (zip ts [1::Int ..]) 86 | put t' 87 | return (n==1) 88 | 89 | forEachPartAndWhole :: (String -> [String]) -> Split () 90 | forEachPartAndWhole parts = do 91 | t <- get 92 | case parts t of 93 | [] -> return () 94 | ts -> forEach (t:ts) >>= put 95 | 96 | 97 | splitDot :: String -> [String] 98 | splitDot = split (dropBlanks $ dropDelims $ whenElt (=='.')) 99 | 100 | splitHyphen :: String -> [String] 101 | splitHyphen = split (dropBlanks $ dropDelims $ whenElt (=='-')) 102 | 103 | splitCamlCase :: String -> [String] 104 | splitCamlCase = split (dropInitBlank $ condense $ keepDelimsL $ whenElt isUpper) 105 | 106 | stripPrefixH :: String -> [String] 107 | stripPrefixH ('H':'S':frag) | all isUpper frag = [frag] 108 | stripPrefixH "HTTP" = [] 109 | stripPrefixH ('H':frag@(c:_)) | isUpper c = [frag] 110 | stripPrefixH _ = [] 111 | 112 | stripPrefix_h :: String -> [String] 113 | stripPrefix_h "http" = [] 114 | stripPrefix_h "html" = [] 115 | stripPrefix_h ('h':'s':frag) = ['s':frag, frag] 116 | stripPrefix_h ('h':frag) {- | Set.notMember (T.pack w) ws -} = [frag] 117 | stripPrefix_h _ = [] 118 | 119 | stripSuffix :: String -> String -> Maybe String 120 | stripSuffix s t = fmap reverse (stripPrefix (reverse s) (reverse t)) 121 | 122 | stripSuffixNum :: String -> [String] 123 | stripSuffixNum s 124 | | null rd || null rs' = [] 125 | | otherwise = [s', d] 126 | where 127 | rs = reverse s 128 | (rd, rs') = span isDigit rs 129 | d = reverse rd 130 | s' = reverse rs' 131 | 132 | splitOn2 :: String -> [String] 133 | splitOn2 t = 134 | case break (=='2') t of 135 | (from@(_:_), '2':to@(c:_)) 136 | | not (isDigit c) 137 | , not (length from == 1 && length to == 1) 138 | -> [from, to] 139 | _ -> [] 140 | 141 | 142 | ------------------- 143 | -- experiment 144 | -- 145 | {- 146 | main = do 147 | pkgsFile <- readFile "pkgs3" 148 | let pkgs :: [PackageDescription] 149 | pkgs = map read (lines pkgsFile) 150 | 151 | -- print "forcing pkgs..." 
152 | -- evaluate (foldl' (\a p -> seq p a) () pkgs) 153 | 154 | sequence_ 155 | [ putStrLn $ display (packageName pkg) ++ ": " ++ display mod ++ " -> " 156 | ++ intercalate ", " (map T.unpack $ extractModuleNameTerms (display mod)) 157 | | pkg <- pkgs 158 | , Just lib <- [library pkg] 159 | , let mods = exposedModules lib 160 | , mod <- mods ] 161 | -} 162 | -------------------------------------------------------------------------------- /Distribution/Server/Pages/Distributions.hs: -------------------------------------------------------------------------------- 1 | 2 | module Distribution.Server.Pages.Distributions 3 | ( homePage 4 | , adminHomePage 5 | , distroListing 6 | , distroPage 7 | , adminDistroPage 8 | ) 9 | where 10 | 11 | import Distribution.Server.Pages.Template (hackagePage) 12 | import Distribution.Server.Features.Distro.Distributions 13 | import Distribution.Server.Users.Types 14 | import Distribution.Text 15 | 16 | import Distribution.Package 17 | import qualified Happstack.Server.SURI as SURI 18 | 19 | import System.FilePath.Posix 20 | import Text.XHtml.Strict 21 | 22 | -- | List of known distributions 23 | homePage :: [DistroName] -> Html 24 | homePage = hackagePage "Distributions" . listing "/distro" 25 | 26 | -- | List of known distributions. Includes a form 27 | -- to add a new distribution. 28 | adminHomePage :: [DistroName] -> Html 29 | adminHomePage distros 30 | = hackagePage "Distributions" $ concat 31 | [ listing "/admin/distro" distros 32 | , addDistroForm 33 | ] 34 | 35 | -- | Display the packages in a distribution. The passed-in URL is the 36 | -- link to the admin page. 37 | distroListing :: DistroName -> [(PackageName, DistroPackageInfo)] -> URL -> Html 38 | distroListing distro packages adminLink 39 | = hackagePage (display distro) $ concat 40 | [ packageListing 41 | , adminLinkHtml 42 | ] 43 | 44 | where 45 | packageListing :: [Html] 46 | packageListing 47 | = [ h3 << ("Packages in " ++ display distro) 48 | , ulist << map (uncurry packageHtml) packages 49 | ] 50 | 51 | packageHtml pName pInfo 52 | = li << (display pName ++ " " ++ display (distroVersion pInfo)) 53 | 54 | adminLinkHtml 55 | = [ h3 << "Admin Tasks" 56 | , anchor ! [href adminLink] << "Administrative tasks" 57 | ] 58 | 59 | -- | Admin page for a distribution. Includes a list 60 | -- of the maintainers and a form to add maintainers. 61 | distroPage :: DistroName -> [UserName] -> Html 62 | distroPage distro users 63 | = hackagePage (display distro) $ concat 64 | [ addPackageForm distro 65 | , userList distro users 66 | , addUserForm distro 67 | ] 68 | 69 | addPackageForm :: DistroName -> [Html] 70 | addPackageForm distro = 71 | let actionUri = 72 | "/distro" </> SURI.escape distro </> "admin" </> "addPackage" 73 | in [ h3 << "Add a package" 74 | , gui actionUri ! [theclass "box"] << 75 | [ p << [stringToHtml "Package: ", textfield "packageName"] 76 | , p << [stringToHtml "Version: ", textfield "version"] 77 | , p << [stringToHtml "URL: ", textfield "uri"] 78 | , submit "submit" "Add package" 79 | ] 80 | ] 81 | 82 | addDistroForm :: [Html] 83 | addDistroForm = 84 | [ h3 << "Add Distribution" 85 | , gui "/admin/createDistro" ! [theclass "box"] << 86 | [ p << [stringToHtml "Name: ", textfield "distroName"] 87 | , submit "submit" "Add distribution" 88 | ] 89 | ] 90 | 91 | {- 92 | This should be updated to match the current URI scheme in the Distro feature. 93 | 94 | displayDir :: Text a => a -> String 95 | displayDir = escapeString f . 
display 96 | where f c = okInPath c && c /= '/' 97 | 98 | -- | Admin form for a distribution. Includes a list 99 | -- of the maintainers, a form to add maintainers and 100 | -- a button to destroy this distribution. 101 | adminDistroPage :: DistroName -> [UserName] -> Html 102 | adminDistroPage distro users 103 | = hackagePage (display distro) $ concat 104 | [ userList distro users 105 | , addUserForm distro 106 | , deleteDistro distro 107 | ] 108 | 109 | addUserForm :: DistroName -> [Html] 110 | addUserForm distro = 111 | [ h3 << "Add a maintainer" 112 | , gui ("/distro" </> displayDir distro </> "admin" </> "addMember") ! [theclass "box"] 113 | << [ p << [stringToHtml "User: ", textfield "userName"] 114 | , submit "submit" "Add user" 115 | ] 116 | ] 117 | 118 | deleteDistro :: DistroName -> [Html] 119 | deleteDistro distro 120 | = [ h3 << "Delete distribution" 121 | , gui ("/admin/distro" </> displayDir distro </> "delete") << 122 | submit "submit" "Delete Distribution" 123 | ] 124 | 125 | userList :: DistroName -> [UserName] -> [Html] 126 | userList distro users 127 | = [ h3 << "Maintainers" 128 | , ulist << map (userHtml distro) users 129 | ] 130 | 131 | userHtml :: DistroName -> UserName -> Html 132 | userHtml distro user 133 | = li << [ stringToHtml $ display user 134 | , removeUser user distro 135 | ] 136 | 137 | removeUser :: UserName -> DistroName -> Html 138 | removeUser user distro 139 | = gui ("/distro" </> displayDir distro </> "admin" </> "removeMember") 140 | << [ hidden "userName" $ display user 141 | , submit "submit" "Remove" 142 | ]-} 143 | 144 | listing :: FilePath -> [DistroName] -> [Html] 145 | listing rootPath distros 146 | = [ h3 << "Distributions" 147 | , ulist << map (distroHtml rootPath) distros 148 | ] 149 | 150 | distroHtml :: FilePath -> DistroName -> Html 151 | distroHtml rootPath distro 152 | = li << anchor ! [href $ rootPath </> SURI.escape distro ] 153 | << display distro 154 | --------------------------------------------------------------------------------
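A small usage sketch to close with (not part of the repository): rendering the distributions index page defined above to a string. It assumes that DistroName is in scope via Distribution.Server.Features.Distro.Distributions (as that module's own imports suggest) and that its Text instance can parse plain names with simpleParse; the distribution names here are made up for illustration.

module Main (main) where

import Data.Maybe (mapMaybe)
import Text.XHtml.Strict (renderHtml)

import Distribution.Server.Features.Distro.Distributions
import Distribution.Server.Pages.Distributions (homePage)
import Distribution.Text (simpleParse)

main :: IO ()
main = do
  -- Parse a few illustrative distribution names; any that fail to
  -- parse are dropped by mapMaybe rather than crashing.
  let distros :: [DistroName]
      distros = mapMaybe simpleParse ["Debian", "Fedora", "Arch"]
  -- homePage builds the "Distributions" listing whose entries link
  -- under /distro.
  putStrLn (renderHtml (homePage distros))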