-- Copyright (C) 2006-2007 David Roundy
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

module Darcs.Repository.Hashed
    ( inventoriesDir
    , inventoriesDirPath
    , pristineDir
    , pristineDirPath
    , patchesDir
    , patchesDirPath
    , hashedInventory
    , hashedInventoryPath

    , revertTentativeChanges
    , revertRepositoryChanges
    , finalizeTentativeChanges
    , cleanPristine
    , filterDirContents
    , cleanInventories
    , cleanPatches
    , copyPristine
    , copyPartialsPristine
    , applyToTentativePristine
    , applyToTentativePristineCwd
    , addToTentativeInventory
    , readRepo
    , readRepoHashed
    , readTentativeRepo
    , writeAndReadPatch
    , writeTentativeInventory
    , copyHashedInventory
    , readHashedPristineRoot
    , pokePristineHash
    , peekPristineHash
    , listInventories
    , listInventoriesLocal
    , listInventoriesRepoDir
    , listPatchesLocalBucketed
    , writePatchIfNecessary
    , diffHashLists
    , withRecorded
    , withTentative
    , tentativelyAddPatch
    , tentativelyRemovePatches
    , tentativelyRemovePatches_
    , tentativelyAddPatch_
    , tentativelyAddPatches_
    , tentativelyReplacePatches
    , finalizeRepositoryChanges
    , unrevertUrl
    , createPristineDirectoryTree
    , createPartialsPristineDirectoryTree
    , reorderInventory
    , cleanRepository
    , UpdatePristine(..)
    , repoXor
    ) where

import Prelude ()
import Darcs.Prelude

import Control.Arrow ( (&&&) )
import Control.Exception ( catch, IOException )
import Darcs.Util.Exception ( catchall )
import Control.Monad ( when, unless, void )
import Data.Maybe
import Data.List( foldl' )

import qualified Data.ByteString as B ( empty, readFile, append )
import qualified Data.ByteString.Char8 as BC ( unpack, pack )
import qualified Data.Set as Set

import Darcs.Util.Hash( encodeBase16, Hash(..), SHA1, sha1Xor, sha1zero )
import Darcs.Util.Prompt ( promptYorn )
import Darcs.Util.Tree( treeHash, Tree )
import Darcs.Util.Tree.Hashed( hashedTreeIO, readDarcsHashedNosize,
                             readDarcsHashed, writeDarcsHashed,
                             decodeDarcsHash, decodeDarcsSize )
import Darcs.Util.SignalHandler ( withSignalsBlocked )

import System.Directory ( createDirectoryIfMissing, getDirectoryContents
                        , doesFileExist, doesDirectoryExist )
import System.FilePath.Posix( (</>) )
import System.IO.Unsafe ( unsafeInterleaveIO )
import System.IO ( stderr, hPutStrLn )

import Darcs.Util.External
    ( copyFileOrUrl
    , cloneFile
    , fetchFilePS
    , gzFetchFilePS
    , Cachable( Uncachable )
    )
import Darcs.Repository.Flags ( Compression, RemoteDarcs, remoteDarcs
    , Verbosity(..), UpdateWorking (..), WithWorkingDir (WithWorkingDir) )

import Darcs.Repository.Format ( RepoProperty( HashedInventory ), formatHas )
import Darcs.Repository.Pending
    ( readPending
    , pendingName
    , tentativelyRemoveFromPending
    , finalizePending
    , setTentativePending
    , prepend
    )
import Darcs.Repository.PatchIndex ( createOrUpdatePatchIndexDisk, doesPatchIndexExist )
import Darcs.Repository.State ( readRecorded, updateIndex )

import Darcs.Util.Global ( darcsdir )
import Darcs.Util.Lock
    ( writeBinFile
    , writeDocBinFile
    , writeAtomicFilePS
    , appendDocBinFile
    , removeFileMayNotExist
    )
import Darcs.Patch.Set ( PatchSet(..), Tagged(..)
                       , SealedPatchSet, Origin
                       , patchSet2RL
                       )

import Darcs.Patch.Show ( ShowPatch, ShowPatchFor(..) )
import Darcs.Patch.PatchInfoAnd
    ( PatchInfoAnd, Hopefully, patchInfoAndPatch, info
    , extractHash, createHashed, hopefully )
import Darcs.Patch ( IsRepoType, RepoPatch, showPatch, apply
                   , description
                   , commuteRL
                   , readPatch
                   , effect
                   , invert
                   )

import Darcs.Patch.Apply ( Apply, ApplyState )

import Darcs.Patch.Bundle ( scanBundle
                          , makeBundleN
                          )
import Darcs.Patch.Named.Wrapped ( namedIsInternal )
import Darcs.Patch.Read ( ReadPatch )
import Darcs.Patch.Depends ( removeFromPatchSet, slightlyOptimizePatchset
                           , mergeThem, splitOnTag )
import Darcs.Patch.Info
    ( PatchInfo, displayPatchInfo, isTag, makePatchname )
import Darcs.Util.Path ( FilePathLike, ioAbsoluteOrRemote, toPath
                       , AbsolutePath, toFilePath )
import Darcs.Repository.Cache ( Cache(..), fetchFileUsingCache,
                                speculateFilesUsingCache, writeFileUsingCache,
                                HashedDir(..), hashedDir, peekInCache, bucketFolder )
import Darcs.Repository.HashedIO ( copyHashed, copyPartialsHashed,
                                   cleanHashdir )
import Darcs.Repository.Inventory
import Darcs.Repository.InternalTypes
    ( Repository
    , repoCache
    , repoFormat
    , repoLocation
    , withRepoLocation
    , coerceT )
import qualified Darcs.Repository.Old as Old ( readOldRepo, oldRepoFailMsg )
import Darcs.Util.File ( withCurrentDirectory )
import Darcs.Patch.Witnesses.Ordered
    ( (+<+), FL(..), RL(..), mapRL, foldFL_M
    , (:>)(..), lengthFL, filterOutFLFL
    , reverseFL, reverseRL )
import Darcs.Patch.Witnesses.Sealed ( Sealed(..), seal, unseal, mapSeal )
import Darcs.Patch.Witnesses.Unsafe ( unsafeCoerceP )

import Darcs.Util.ByteString ( gzReadFilePS )
import Darcs.Util.Printer.Color ( showDoc )
import Darcs.Util.Printer
    ( Doc, hcat, ($$), renderString, renderPS, text, putDocLn, (<+>) )
import Darcs.Util.Progress ( beginTedious, endTedious, debugMessage, finishedOneIO )
import Darcs.Patch.Progress (progressFL)
import Darcs.Util.Workaround ( renameFile )
import Darcs.Repository.Prefs ( globalCacheDir )


makeDarcsdirPath :: String -> String
makeDarcsdirPath name = darcsdir </> name

-- TODO rename xyzPath to xyzLocal to make it clear that it is
-- relative to the local darcsdir

-- Location of the (one and only) head inventory.
hashedInventory, hashedInventoryPath :: String
hashedInventory = "hashed_inventory"
hashedInventoryPath = makeDarcsdirPath hashedInventory

-- Location of the (one and only) tentative head inventory.
tentativeHashedInventory, tentativeHashedInventoryPath :: String
tentativeHashedInventory = "tentative_hashed_inventory"
tentativeHashedInventoryPath = makeDarcsdirPath tentativeHashedInventory

-- Location of parent inventories.
inventoriesDir, inventoriesDirPath :: String
inventoriesDir = "inventories"
inventoriesDirPath = makeDarcsdirPath inventoriesDir

-- Location of pristine trees.
pristineDir, tentativePristinePath, pristineDirPath :: String
tentativePristinePath = makeDarcsdirPath "tentative_pristine"
pristineDir = "pristine.hashed"
pristineDirPath = makeDarcsdirPath pristineDir

-- Location of patches.
patchesDir, patchesDirPath :: String
patchesDir = "patches"
patchesDirPath = makeDarcsdirPath patchesDir
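
-- With @darcsdir = "_darcs"@ (its usual value), these helpers expand to
-- paths such as
--
-- > patchesDirPath == "_darcs" </> "patches"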

-- | The way patchfiles, inventories, and pristine trees are stored.
-- 'PlainLayout' means all files are in the same directory. 'BucketedLayout'
-- means we create a second level of subdirectories, such that all files whose
-- hash starts with the same two letters are in the same directory.
data DirLayout = PlainLayout | BucketedLayout
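
-- For example, under 'BucketedLayout' a file whose hash starts with "ab"
-- lives in a subdirectory named after those two letters, e.g.
-- @inventories/ab/@ (cf. 'bucketFolder'), whereas 'PlainLayout' keeps it
-- directly in @inventories/@.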

-- | 'applyToHashedPristine' takes a root hash @h@ and a patch @p@, and
-- attempts to apply the patch to the 'Tree' identified by @h@. If we
-- encounter an old, size-prefixed pristine, we first convert it to the
-- non-size-prefixed format, then apply the patch.
applyToHashedPristine :: (Apply p, ApplyState p ~ Tree) => String -> p wX wY
                      -> IO String
applyToHashedPristine h p = applyOrConvertOldPristineAndApply
  where
    applyOrConvertOldPristineAndApply =
        tryApply hash `catch` \(_ :: IOException) -> handleOldPristineAndApply

    hash = decodeDarcsHash $ BC.pack h

    failOnMalformedRoot (SHA256 _) = return ()
    failOnMalformedRoot root = fail $ "Cannot handle hash: " ++ show root

    hash2root = BC.unpack . encodeBase16

    tryApply :: Hash -> IO String
    tryApply root = do
        failOnMalformedRoot root
        -- Read a non-size-prefixed pristine, failing if we encounter an old,
        -- size-prefixed one.
        tree <- readDarcsHashedNosize pristineDirPath root
        (_, updatedTree) <- hashedTreeIO (apply p) tree pristineDirPath
        return . hash2root $ treeHash updatedTree

    warn = "WARNING: Doing a one-time conversion of pristine format.\n"
           ++ "This may take a while. The new format is backwards-compatible."

    handleOldPristineAndApply = do
        hPutStrLn stderr warn
        inv <- gzReadFilePS hashedInventoryPath
        let oldroot = BC.pack $ peekPristineHash inv
            oldrootSizeandHash = (decodeDarcsSize &&& decodeDarcsHash) oldroot
        -- Read the old size-prefixed pristine tree
        old <- readDarcsHashed pristineDirPath oldrootSizeandHash
        -- Write out the pristine tree as a non-size-prefixed pristine.
        root <- writeDarcsHashed old pristineDirPath
        let newroot = hash2root root
        -- Write out the new inventory.
        writeDocBinFile hashedInventoryPath $ pokePristineHash newroot inv
        cleanHashdir (Ca []) HashedPristineDir [newroot]
        hPutStrLn stderr "Pristine conversion done..."
        -- Retry applying the patch, which should now succeed.
        tryApply root

-- |revertTentativeChanges overwrites the tentative hashed inventory with the
-- current "real" hashed inventory, and then updates the tentative pristine
-- with the pristine hash from the "real" inventory.
revertTentativeChanges :: IO ()
revertTentativeChanges = do
    cloneFile hashedInventoryPath tentativeHashedInventoryPath
    i <- gzReadFilePS hashedInventoryPath
    writeBinFile tentativePristinePath $ B.append pristineName (BC.pack (peekPristineHash i))

-- |finalizeTentativeChanges tries to atomically swap the tentative
-- inventory/pristine pointers with the "real" pointers; it first re-reads the
-- inventory to optimize it, presumably to take account of any new tags, and
-- then writes out the new tentative inventory, and finally does the atomic
-- swap. In general, we can't clean the pristine cache at the same time, since
-- a simultaneous get might be in progress.
finalizeTentativeChanges :: (IsRepoType rt, RepoPatch p)
                         => Repository rt p wR wU wT -> Compression -> IO ()
finalizeTentativeChanges r compr = do
    debugMessage "Optimizing the inventory..."
    -- Read the tentative patches
    ps <- readTentativeRepo r "."
    writeTentativeInventory (repoCache r) compr ps
    i <- gzReadFilePS tentativeHashedInventoryPath
    p <- gzReadFilePS tentativePristinePath
    -- Write out the "optimised" tentative inventory.
    writeDocBinFile tentativeHashedInventoryPath $ pokePristineHash (peekPristineHash p) i
    -- Atomically swap.
    renameFile tentativeHashedInventoryPath hashedInventoryPath
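
-- A minimal sketch of the tentative-state protocol used by callers (here
-- @repo@, @compr@ and @somePatch@ are placeholder bindings; real callers
-- also take care of pending, locking and signal blocking):
--
-- > revertTentativeChanges                         -- re-initialise tentative state
-- > _ <- addToTentativeInventory (repoCache repo) compr somePatch
-- > finalizeTentativeChanges repo compr            -- atomically publish it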

-- |readHashedPristineRoot attempts to read the pristine hash from the current
-- inventory, returning Nothing if it cannot do so.
readHashedPristineRoot :: Repository rt p wR wU wT -> IO (Maybe String)
readHashedPristineRoot r = withRepoLocation r $ do
    i <- (Just <$> gzReadFilePS hashedInventoryPath)
         `catch` (\(_ :: IOException) -> return Nothing)
    return $ peekPristineHash <$> i

-- |cleanPristine removes any obsolete (unreferenced) entries in the pristine
-- cache.
cleanPristine :: Repository rt p wR wU wT -> IO ()
cleanPristine r = withRepoLocation r $ do
    debugMessage "Cleaning out the pristine cache..."
    i <- gzReadFilePS hashedInventoryPath
    cleanHashdir (repoCache r) HashedPristineDir [peekPristineHash i]

-- |filterDirContents returns the contents of the darcsdir subdirectory @d@,
-- excluding files whose names begin with '.' (i.e. the directories . and ..
-- and hidden files) and files whose names do not satisfy the predicate @f@.
-- If the directory does not exist, no paths are returned.
filterDirContents :: FilePath -> (FilePath -> Bool) -> IO [FilePath]
filterDirContents d f = do
    let realPath = makeDarcsdirPath d
    exists <- doesDirectoryExist realPath
    if exists
        then filter (\x -> head x /= '.' && f x) <$>
            getDirectoryContents realPath
        else return []
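
-- A usage sketch, as in 'cleanPatches' below: list the hashed files under
-- _darcs/patches while skipping the special (non-hashed) ones:
--
-- > fs <- filterDirContents patchesDir (`notElem` specialPatches)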

-- | Set difference between two lists of hashes.
diffHashLists :: [String] -> [String] -> [String]
diffHashLists xs ys = from_set $ (to_set xs) `Set.difference` (to_set ys)
  where
    to_set = Set.fromList . map BC.pack
    from_set = map BC.unpack . Set.toList
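
-- For instance:
--
-- > diffHashLists ["aa", "bb", "cc"] ["bb"]
--
-- yields @["aa", "cc"]@; being a set difference, the result is ordered by
-- 'Set.toList' rather than by the input list.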

-- |cleanInventories removes any obsolete (unreferenced) files in the
-- inventories directory.
cleanInventories :: Repository rt p wR wU wT -> IO ()
cleanInventories _ = do
    debugMessage "Cleaning out inventories..."
    hs <- listInventoriesLocal
    fs <- filterDirContents inventoriesDir (const True)
    mapM_ (removeFileMayNotExist . (inventoriesDirPath </>))
        (diffHashLists fs hs)

-- FIXME this is ugly, these files should be directly under _darcs
-- since they are not hashed. And 'unrevert' isn't even a real patch but
-- a patch bundle.
-- |specialPatches is the list of special (non-hashed) patch files that may
-- exist in the directory _darcs/patches/.
specialPatches :: [FilePath]
specialPatches = ["unrevert", "pending", "pending.tentative"]

-- |cleanPatches removes any obsolete (unreferenced) files in the
-- patches directory.
cleanPatches :: Repository rt p wR wU wT -> IO ()
cleanPatches _ = do
    debugMessage "Cleaning out patches..."
    hs <- listPatchesLocal PlainLayout darcsdir darcsdir
    fs <- filterDirContents patchesDir (`notElem` specialPatches)
    mapM_ (removeFileMayNotExist . (patchesDirPath </>))
        (diffHashLists fs hs)


-- |addToSpecificInventory adds a patch to a specific inventory file, and
-- returns the FilePath which corresponds to the written-out patch.
addToSpecificInventory :: RepoPatch p => String -> Cache -> Compression
                       -> PatchInfoAnd rt p wX wY -> IO FilePath
addToSpecificInventory invPath c compr p = do
    let invFile = makeDarcsdirPath invPath
    hash <- snd <$> writePatchIfNecessary c compr p
    appendDocBinFile invFile $
      showInventoryEntry (info p, hash)
    return $ patchesDirPath </> getValidHash hash

-- | Warning: this allows adding an arbitrary patch! Used by convert import.
addToTentativeInventory :: RepoPatch p => Cache -> Compression
                        -> PatchInfoAnd rt p wX wY -> IO FilePath
addToTentativeInventory = addToSpecificInventory tentativeHashedInventory
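
-- A usage sketch, mirroring 'tentativelyAddPatch_' below (@r@, @compr@ and
-- @p@ are placeholder bindings):
--
-- > void $ addToTentativeInventory (repoCache r) compr p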

-- | Attempt to remove an FL of patches from the tentative inventory.
-- This is used for commands that wish to modify already-recorded patches.
--
-- Precondition: it must be possible to remove the patches, i.e.
--
-- * the patches are in the repository
--
-- * any necessary commutations will succeed
removeFromTentativeInventory :: (IsRepoType rt, RepoPatch p)
                             => Repository rt p wR wU wT -> Compression
                             -> FL (PatchInfoAnd rt p) wX wT -> IO ()
removeFromTentativeInventory repo compr to_remove = do
    debugMessage $ "Start removeFromTentativeInventory"
    allpatches <- readTentativeRepo repo "."
    remaining <- case removeFromPatchSet to_remove allpatches of
        Nothing -> bug "Hashed.removeFromTentativeInventory: precondition violated"
        Just r -> return r
    writeTentativeInventory (repoCache repo) compr remaining
    debugMessage $ "Done removeFromTentativeInventory"

-- |writeHashFile takes a Doc and writes it as a hash-named file, returning the
-- filename that the contents were written to.
writeHashFile :: Cache -> Compression -> HashedDir -> Doc -> IO String
writeHashFile c compr subdir d = do
    debugMessage $ "Writing hash file to " ++ hashedDir subdir
    writeFileUsingCache c compr subdir $ renderPS d

-- |readRepoHashed returns the "current" repo patchset.
readRepoHashed :: (IsRepoType rt, RepoPatch p) => Repository rt p wR wU wT
               -> String -> IO (PatchSet rt p Origin wR)
readRepoHashed = readRepoUsingSpecificInventory hashedInventory

-- |readTentativeRepo returns the tentative repo patchset.
readTentativeRepo :: (IsRepoType rt, RepoPatch p)
                  => Repository rt p wR wU wT -> String
                  -> IO (PatchSet rt p Origin wT)
readTentativeRepo = readRepoUsingSpecificInventory tentativeHashedInventory

-- |readRepoUsingSpecificInventory uses the inventory at @invPath@ to read the
-- repository @repo@.
readRepoUsingSpecificInventory :: (IsRepoType rt, RepoPatch p)
                               => String -> Repository rt p wR wU wT
                               -> String -> IO (PatchSet rt p Origin wS)
readRepoUsingSpecificInventory invPath repo dir = do
    realdir <- toPath <$> ioAbsoluteOrRemote dir
    Sealed ps <- readRepoPrivate (repoCache repo) realdir invPath
                 `catch` \e -> do
                     hPutStrLn stderr ("Invalid repository: " ++ realdir)
                     ioError e
    return $ unsafeCoerceP ps
  where
    readRepoPrivate :: (IsRepoType rt, RepoPatch p) => Cache -> FilePath
                    -> FilePath -> IO (SealedPatchSet rt p Origin)
    readRepoPrivate cache d iname = do
      inventory <- readInventoryPrivate (d </> darcsdir </> iname)
      readRepoFromInventoryList cache inventory

-- | Read a 'PatchSet' from the repository (assumed to be located at the
-- current working directory) by following the chain of 'Inventory's, starting
-- with the given one. The 'Cache' parameter is used to locate patches and parent
-- inventories, since not all of them need be present inside the current repo.
readRepoFromInventoryList
  :: (IsRepoType rt, RepoPatch p)
  => Cache
  -> Inventory
  -> IO (SealedPatchSet rt p Origin)
readRepoFromInventoryList cache = parseInv
  where
    parseInv :: (IsRepoType rt, RepoPatch p)
             => Inventory
             -> IO (SealedPatchSet rt p Origin)
    parseInv (Inventory Nothing ris) =
        mapSeal (PatchSet NilRL) <$> read_patches (reverse ris)
    parseInv (Inventory (Just h) []) =
        -- TODO could be more tolerant and create a larger PatchSet
        bug $ "bad inventory " ++ getValidHash h ++ " (no tag) in parseInv!"
    parseInv (Inventory (Just h) (t : ris)) = do
        Sealed ts <- unseal seal <$> unsafeInterleaveIO (read_ts t h)
        Sealed ps <- unseal seal <$>
                        unsafeInterleaveIO (read_patches $ reverse ris)
        return $ seal $ PatchSet ts ps

    read_patches :: (IsRepoType rt, RepoPatch p) => [InventoryEntry]
                 -> IO (Sealed (RL (PatchInfoAnd rt p) wX))
    read_patches [] = return $ seal NilRL
    read_patches allis@((i1, h1) : is1) =
        lift2Sealed (\p rest -> rest :<: i1 `patchInfoAndPatch` p) (rp is1)
                    (createValidHashed h1 (const $ speculateAndParse h1 allis i1))
      where
        rp :: (IsRepoType rt, RepoPatch p) => [InventoryEntry]
           -> IO (Sealed (RL (PatchInfoAnd rt p) wX))
        rp [] = return $ seal NilRL
        rp [(i, h), (il, hl)] =
            lift2Sealed (\p rest -> rest :<: i `patchInfoAndPatch` p)
                        (rp [(il, hl)])
                        (createValidHashed h
                            (const $ speculateAndParse h (reverse allis) i))
        rp ((i, h) : is) =
            lift2Sealed (\p rest -> rest :<: i `patchInfoAndPatch` p)
                        (rp is)
                        (createValidHashed h (parse i))

    lift2Sealed :: (forall wY wZ . q wY wZ -> p wX wY -> r wX wZ)
                -> IO (Sealed (p wX))
                -> (forall wB . IO (Sealed (q wB)))
                -> IO (Sealed (r wX))
    lift2Sealed f iox ioy = do
        Sealed x <- unseal seal <$> unsafeInterleaveIO iox
        Sealed y <- unseal seal <$> unsafeInterleaveIO ioy
        return $ seal $ f y x

    speculateAndParse h is i = speculate h is >> parse i h

    speculate :: PatchHash -> [InventoryEntry] -> IO ()
    speculate h is = do
        already_got_one <- peekInCache cache HashedPatchesDir (getValidHash h)
        unless already_got_one $
            speculateFilesUsingCache cache HashedPatchesDir (map (getValidHash . snd) is)

    parse :: ReadPatch p => PatchInfo -> PatchHash -> IO (Sealed (p wX))
    parse i h = do
        debugMessage ("Reading patch file: "++ showDoc (displayPatchInfo i))
        (fn, ps) <- fetchFileUsingCache cache HashedPatchesDir (getValidHash h)
        case readPatch ps of
            Just p -> return p
            Nothing -> fail $ unlines [ "Couldn't parse file " ++ fn
                                      , "which is patch"
                                      , renderString $ displayPatchInfo i ]

    read_ts :: (IsRepoType rt, RepoPatch p) => InventoryEntry
            -> InventoryHash -> IO (Sealed (RL (Tagged rt p) Origin))
    read_ts tag0 h0 = do
        contents <- unsafeInterleaveIO $ readTaggedInventoryFromHash (getValidHash h0)
        let is = reverse $ case contents of
                               (Inventory (Just _) (_ : ris0)) -> ris0
                               (Inventory Nothing ris0) -> ris0
                               (Inventory (Just _) []) -> bug "inventory without tag!"
        Sealed ts <- unseal seal <$>
                         unsafeInterleaveIO
                            (case contents of
                                 (Inventory (Just h') (t' : _)) -> read_ts t' h'
                                 (Inventory (Just _) []) -> bug "inventory without tag!"
                                 (Inventory Nothing _) -> return $ seal NilRL)
        Sealed ps <- unseal seal <$> unsafeInterleaveIO (read_patches is)
        Sealed tag00 <- read_tag tag0
        return $ seal $ ts :<: Tagged tag00 (Just (getValidHash h0)) ps

    read_tag :: (IsRepoType rt, RepoPatch p) => InventoryEntry
             -> IO (Sealed (PatchInfoAnd rt p wX))
    read_tag (i, h) =
        mapSeal (patchInfoAndPatch i) <$> createValidHashed h (parse i)

    readTaggedInventoryFromHash :: String
                                -> IO Inventory
    readTaggedInventoryFromHash invHash = do
        (fileName, pristineAndInventory) <-
            fetchFileUsingCache cache HashedInventoriesDir invHash
        case parseInventory pristineAndInventory of
          Just r -> return r
          Nothing -> fail $ unwords ["parse error in file", fileName]

-- | Read an inventory from a file. Fails with an error message if
-- the file is not there or cannot be parsed.
readInventoryPrivate :: FilePath
                     -> IO Inventory
readInventoryPrivate path = do
    inv <- skipPristineHash <$> gzFetchFilePS path Uncachable
    case parseInventory inv of
      Just r -> return r
      Nothing -> fail $ unwords ["parse error in file", path]

-- |copyHashedInventory copies the head inventory from the repository at
-- @inloc@ into the repository @outrepo@.
copyHashedInventory :: Repository rt p wR wU wT -> RemoteDarcs -> String -> IO ()
copyHashedInventory outrepo rdarcs inloc | remote <- remoteDarcs rdarcs = do
    let outloc = repoLocation outrepo
    createDirectoryIfMissing False (outloc ++ "/" ++ inventoriesDirPath)
    copyFileOrUrl remote (inloc </> hashedInventoryPath)
                         (outloc </> hashedInventoryPath)
                  Uncachable -- no need to copy anything but hashed_inventory!
    debugMessage "Done copying hashed inventory."

-- |writeAndReadPatch makes a patch lazy by writing it out to disk (thus
-- forcing it) and then re-reading it lazily.
writeAndReadPatch :: (IsRepoType rt, RepoPatch p) => Cache -> Compression
                  -> PatchInfoAnd rt p wX wY -> IO (PatchInfoAnd rt p wX wY)
writeAndReadPatch c compr p = do
    (i, h) <- writePatchIfNecessary c compr p
    unsafeInterleaveIO $ readp h i
  where
    parse i h = do
        debugMessage ("Rereading patch file: "++ showDoc (displayPatchInfo i))
        (fn, ps) <- fetchFileUsingCache c HashedPatchesDir (getValidHash h)
        case readPatch ps of
            Just x -> return x
            Nothing -> fail $ unlines [ "Couldn't parse patch file " ++ fn
                                      , "which is"
                                      , renderString $ displayPatchInfo i]

    readp h i = do Sealed x <- createValidHashed h (parse i)
                   return . patchInfoAndPatch i $ unsafeCoerceP x

createValidHashed :: PatchHash
                  -> (PatchHash -> IO (Sealed (a wX)))
                  -> IO (Sealed (Darcs.Patch.PatchInfoAnd.Hopefully a wX))
createValidHashed h f = createHashed (getValidHash h) (f . mkValidHash)

-- | writeTentativeInventory writes @patchSet@ as the tentative inventory.
writeTentativeInventory :: RepoPatch p => Cache -> Compression
                        -> PatchSet rt p Origin wX -> IO ()
writeTentativeInventory cache compr patchSet = do
    debugMessage "in writeTentativeInventory..."
    createDirectoryIfMissing False inventoriesDirPath
    beginTedious tediousName
    hsh <- writeInventoryPrivate $ slightlyOptimizePatchset patchSet
    endTedious tediousName
    debugMessage "still in writeTentativeInventory..."
    case hsh of
        Nothing -> writeBinFile (makeDarcsdirPath tentativeHashedInventory) B.empty
        Just h -> do
            content <- snd <$> fetchFileUsingCache cache HashedInventoriesDir h
            writeAtomicFilePS (makeDarcsdirPath tentativeHashedInventory) content
  where
    tediousName = "Writing inventory"
    writeInventoryPrivate :: RepoPatch p => PatchSet rt p Origin wX
                          -> IO (Maybe String)
    writeInventoryPrivate (PatchSet NilRL NilRL) = return Nothing
    writeInventoryPrivate (PatchSet NilRL ps) = do
        inventory <- sequence $ mapRL (writePatchIfNecessary cache compr) ps
        let inventorylist = showInventoryPatches (reverse inventory)
        hash <- writeHashFile cache compr HashedInventoriesDir inventorylist
        return $ Just hash
    writeInventoryPrivate
        (PatchSet xs@(_ :<: Tagged t _ _) x) = do
        resthash <- write_ts xs
        finishedOneIO tediousName $ fromMaybe "" resthash
        inventory <- sequence $ mapRL (writePatchIfNecessary cache compr)
                                    (NilRL :<: t +<+ x)
        let inventorylist = hcat (map showInventoryEntry $ reverse inventory)
            inventorycontents =
                case resthash of
                    Just h -> text ("Starting with inventory:\n" ++ h) $$
                                  inventorylist
                    Nothing -> inventorylist
        hash <- writeHashFile cache compr HashedInventoriesDir inventorycontents
        return $ Just hash
      where
        -- | write_ts writes out a tagged patchset. If it has already been
        -- written, we'll have the hash, so we can immediately return it.
        write_ts :: RepoPatch p => RL (Tagged rt p) Origin wX
                 -> IO (Maybe String)
        write_ts (_ :<: Tagged _ (Just h) _) = return (Just h)
        write_ts (tts :<: Tagged _ Nothing pps) =
            writeInventoryPrivate $ PatchSet tts pps
        write_ts NilRL = return Nothing

-- |writePatchIfNecessary writes the patch and returns the resulting info/hash,
-- if it has not already been written. If it has been written, we have the hash
-- in the PatchInfoAnd, so we extract and return the info/hash.
writePatchIfNecessary :: RepoPatch p => Cache -> Compression
                      -> PatchInfoAnd rt p wX wY -> IO InventoryEntry
writePatchIfNecessary c compr hp = infohp `seq`
    case extractHash hp of
        Right h -> return (infohp, mkValidHash h)
        Left p -> do
          h <- writeHashFile c compr HashedPatchesDir (showPatch ForStorage p)
          return (infohp, mkValidHash h)
  where
    infohp = info hp

-- |listInventoriesWith returns a list of the inventory hashes.
-- The first argument is the function used to read an inventory file
-- (e.g. 'readInventoryPrivate' or 'readInventoryLocalPrivate').
-- The second argument specifies whether the files are expected
-- to be stored in plain or in bucketed format.
-- The third argument is the directory of the parent inventory files.
-- The fourth argument is the directory of the head inventory file.
listInventoriesWith
  :: (FilePath -> IO Inventory)
  -> DirLayout
  -> String -> String -> IO [String]
listInventoriesWith readInv dirformat baseDir startDir = do
    mbStartingWithInv <- getStartingWithHash startDir hashedInventory
    followStartingWiths mbStartingWithInv
  where
    getStartingWithHash dir file = inventoryParent <$> readInv (dir </> file)

    invDir = baseDir </> inventoriesDir
    nextDir dir = case dirformat of
        BucketedLayout -> invDir </> bucketFolder dir
        PlainLayout -> invDir

    followStartingWiths Nothing = return []
    followStartingWiths (Just hash) = do
        let startingWith = getValidHash hash
        mbNextInv <- getStartingWithHash (nextDir startingWith) startingWith
        (startingWith :) <$> followStartingWiths mbNextInv

-- |listInventories returns a list of the inventory hashes.
-- This function attempts to retrieve missing inventory files.
listInventories :: IO [String]
listInventories =
    listInventoriesWith readInventoryPrivate PlainLayout darcsdir darcsdir

-- | Read the given inventory file if it exists, otherwise return an empty
-- inventory. Used when we expect that some inventory files may be missing.
readInventoryLocalPrivate :: FilePath -> IO Inventory
readInventoryLocalPrivate path = do
    b <- doesFileExist path
    if b then readInventoryPrivate path
        else return emptyInventory

-- | Return the inventory hashes obtained by following the head inventory.
-- This function does not attempt to retrieve missing inventory files.
listInventoriesLocal :: IO [String]
listInventoriesLocal =
    listInventoriesWith readInventoryLocalPrivate PlainLayout darcsdir darcsdir

-- |listInventoriesRepoDir returns a list of the inventory hashes.
-- The argument @repoDir@ is the directory of the repository from which
-- we are going to read the head inventory file.
-- The remaining hashed files are read from the global cache.
listInventoriesRepoDir :: String -> IO [String]
listInventoriesRepoDir repoDir = do
    gCacheDir' <- globalCacheDir
    let gCacheInvDir = fromJust gCacheDir'
    listInventoriesWith
      readInventoryLocalPrivate BucketedLayout gCacheInvDir (repoDir </> darcsdir)

-- | Return a list of the patch filenames, extracted from inventory
-- files, by starting with the head inventory and then following the
-- chain of parent inventories.
--
-- This function does not attempt to download missing inventory files.
--
-- * The first argument specifies whether the files are expected
--   to be stored in plain or in bucketed format.
-- * The second argument is the directory of the parent inventory.
-- * The third argument is the directory of the head inventory.
listPatchesLocal :: DirLayout -> String -> String -> IO [String]
listPatchesLocal dirformat baseDir startDir = do
    inventory <- readInventoryPrivate (startDir </> hashedInventory)
    followStartingWiths (inventoryParent inventory) (inventoryPatchNames inventory)
    where
        invDir = baseDir </> inventoriesDir
        nextDir dir = case dirformat of
            BucketedLayout -> invDir </> bucketFolder dir
            PlainLayout -> invDir

        followStartingWiths Nothing patches = return patches
        followStartingWiths (Just hash) patches = do
            let startingWith = getValidHash hash
            inv <- readInventoryLocalPrivate
                (nextDir startingWith </> startingWith)
            (patches++) <$> followStartingWiths (inventoryParent inv) (inventoryPatchNames inv)

-- |listPatchesLocalBucketed is similar to listPatchesLocal, but
-- it reads the inventory directory under @darcsDir@ in bucketed format.
listPatchesLocalBucketed :: String -> String -> IO [String]
listPatchesLocalBucketed = listPatchesLocal BucketedLayout

-- | copyPristine copies a pristine tree into the current pristine dir,
--   and possibly copies a clean working copy.
--   The pristine root hash is read from the passed-in dir/inventory name
--   combination.
copyPristine :: Cache -> String -> String -> WithWorkingDir -> IO ()
copyPristine cache dir iname wwd = do
    i <- fetchFilePS (dir ++ "/" ++ iname) Uncachable
    debugMessage $ "Copying hashed pristine tree: " ++ peekPristineHash i
    let tediousName = "Copying pristine"
    beginTedious tediousName
    copyHashed tediousName cache wwd $ peekPristineHash i
    endTedious tediousName

-- |copyPartialsPristine copies the pristine entries for a given list of
-- filepaths.
copyPartialsPristine :: FilePathLike fp => Cache -> String
                     -> String -> [fp] -> IO ()
copyPartialsPristine c d iname fps = do
    i <- fetchFilePS (d ++ "/" ++ iname) Uncachable
    copyPartialsHashed c (peekPristineHash i) fps

unrevertUrl :: Repository rt p wR wU wT -> String
unrevertUrl r = repoLocation r ++ "/"++darcsdir++"/patches/unrevert"

tentativelyAddPatch :: (RepoPatch p, ApplyState p ~ Tree)
                    => Repository rt p wR wU wT
                    -> Compression
                    -> Verbosity
                    -> UpdateWorking
                    -> PatchInfoAnd rt p wT wY
                    -> IO (Repository rt p wR wU wY)
tentativelyAddPatch = tentativelyAddPatch_ UpdatePristine

data UpdatePristine = UpdatePristine
                    | DontUpdatePristine
                    | DontUpdatePristineNorRevert deriving Eq

tentativelyAddPatches_ :: (RepoPatch p, ApplyState p ~ Tree)
                       => UpdatePristine
                       -> Repository rt p wR wU wT
                       -> Compression
                       -> Verbosity
                       -> UpdateWorking
                       -> FL (PatchInfoAnd rt p) wT wY
                       -> IO (Repository rt p wR wU wY)
tentativelyAddPatches_ up r c v uw ps =
    foldFL_M (\r' p -> tentativelyAddPatch_ up r' c v uw p) r ps

-- TODO re-add a safety catch for --dry-run? Maybe using a global, like dryRun
-- :: Bool, with dryRun = unsafePerformIO $ readIORef ...
tentativelyAddPatch_ :: (RepoPatch p, ApplyState p ~ Tree)
                     => UpdatePristine
                     -> Repository rt p wR wU wT
                     -> Compression
                     -> Verbosity
                     -> UpdateWorking
                     -> PatchInfoAnd rt p wT wY
                     -> IO (Repository rt p wR wU wY)

tentativelyAddPatch_ up r compr verb uw p =
    withRepoLocation r $ do
       void $ addToTentativeInventory (repoCache r) compr p
       when (up == UpdatePristine) $ do debugMessage "Applying to pristine cache..."
                                        applyToTentativePristine r verb p
                                        debugMessage "Updating pending..."
                                        tentativelyRemoveFromPending r uw p
       return (coerceT r)


-- |applyToTentativePristine applies a patch @p@ to the tentative pristine
-- tree, and updates the tentative pristine hash.
applyToTentativePristine :: (ApplyState q ~ Tree, Apply q, ShowPatch q)
                         => Repository rt p wR wU wT
                         -> Verbosity
                         -> q wT wY
                         -> IO ()
applyToTentativePristine r verb p =
    withRepoLocation r $
    do when (verb == Verbose) $ putDocLn $ text "Applying to pristine..." <+> description p
       applyToTentativePristineCwd p

applyToTentativePristineCwd :: (ApplyState p ~ Tree, Apply p) => p wX wY
                            -> IO ()
applyToTentativePristineCwd p = do
    tentativePristine <- gzReadFilePS tentativePristinePath
    -- Extract the pristine hash from the tentativePristine file, using
    -- peekPristineHash (this is valid since we normally just extract the hash from the
    -- first line of an inventory file; we can pass in a one-line file that
    -- just contains said hash).
    let tentativePristineHash = peekPristineHash tentativePristine
    newPristineHash <- applyToHashedPristine tentativePristineHash p
    writeDocBinFile tentativePristinePath $
        pokePristineHash newPristineHash tentativePristine

tentativelyRemovePatches :: (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                         => Repository rt p wR wU wT
                         -> Compression
                         -> UpdateWorking
                         -> FL (PatchInfoAnd rt p) wX wT
                         -> IO (Repository rt p wR wU wX)
tentativelyRemovePatches = tentativelyRemovePatches_ UpdatePristine

tentativelyRemovePatches_ :: (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                          => UpdatePristine
                          -> Repository rt p wR wU wT
                          -> Compression
                          -> UpdateWorking
                          -> FL (PatchInfoAnd rt p) wX wT
                          -> IO (Repository rt p wR wU wX)
tentativelyRemovePatches_ up r compr uw ps =
    withRepoLocation r $ do
      when (up == UpdatePristine) $ do debugMessage "Adding changes to pending..."
                                       prepend r uw $ effect ps
      unless (up == DontUpdatePristineNorRevert) $ removeFromUnrevertContext r ps
      debugMessage "Removing changes from tentative inventory..."
      if formatHas HashedInventory (repoFormat r)
        then do removeFromTentativeInventory r compr ps
                when (up == UpdatePristine) $
                     applyToTentativePristineCwd $
                     progressFL "Applying inverse to pristine" $ invert ps
        else fail Old.oldRepoFailMsg
      return (coerceT r)

-- FIXME this is a rather weird API. If called with a patch that isn't already
-- in the repo, it fails with an obscure error from 'commuteToEnd'. It also
-- ends up redoing the work that the caller has already done - if it has
-- already commuted these patches to the end, it must also know the commuted
-- versions of the other patches in the repo.
-- |Given a sequence of patches anchored at the end of the current repository,
-- actually pull them to the end of the repository by removing any patches
-- with the same name and then adding the passed in sequence.
-- Typically callers will have obtained the passed in sequence using
-- 'findCommon' and friends.
tentativelyReplacePatches :: forall rt p wR wU wT wX
                           . (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                          => Repository rt p wR wU wT
                          -> Compression
                          -> UpdateWorking
                          -> Verbosity
                          -> FL (PatchInfoAnd rt p) wX wT
                          -> IO ()
tentativelyReplacePatches repository compr uw verb ps =
    do let ps' = filterOutFLFL (namedIsInternal . hopefully) ps
       repository' <- tentativelyRemovePatches_ DontUpdatePristineNorRevert repository compr uw ps'
       mapAdd repository' ps'
  where mapAdd :: Repository rt p wM wL wI
               -> FL (PatchInfoAnd rt p) wI wJ
               -> IO ()
        mapAdd _ NilFL = return ()
        mapAdd r (a:>:as) =
               do r' <- tentativelyAddPatch_ DontUpdatePristine r compr verb uw a
                  mapAdd r' as

-- The type here should rather be
--  ... -> Repo rt p wR wU wT -> IO (Repo rt p wT wU wT)
-- In other words: we set the recorded state to the tentative state.
finalizeRepositoryChanges :: (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                          => Repository rt p wR wU wT
                          -> UpdateWorking
                          -> Compression
                          -> IO ()
finalizeRepositoryChanges r updateWorking compr
    | formatHas HashedInventory (repoFormat r) =
        withRepoLocation r $ do
            debugMessage "Finalizing changes..."
            withSignalsBlocked $ do
                 finalizeTentativeChanges r compr
                 recordedState <- readRecorded r
                 finalizePending r updateWorking recordedState
            debugMessage "Done finalizing changes..."
            ps <- readRepo r
            doesPatchIndexExist (repoLocation r) >>= (`when` createOrUpdatePatchIndexDisk r ps)
            updateIndex r
    | otherwise = fail Old.oldRepoFailMsg

-- TODO: rename this and document the transaction protocol (revert/finalize)
-- clearly.
-- |Slightly confusingly named: as well as throwing away any tentative
-- changes, revertRepositoryChanges also re-initialises the tentative state.
-- It's therefore used before making any changes to the repo.
-- So the type should rather be
--
-- > ... -> Repo rt p wR wU wT -> IO (Repo rt p wR wU wR)
revertRepositoryChanges :: RepoPatch p
                        => Repository rt p wR wU wT
                        -> UpdateWorking
                        -> IO ()
revertRepositoryChanges r uw
 | formatHas HashedInventory (repoFormat r) =
    withRepoLocation r $
    do removeFileMayNotExist (pendingName ++ ".tentative")
       Sealed x <- readPending r
       setTentativePending r uw x
       when (uw == NoUpdateWorking) $ removeFileMayNotExist pendingName
       revertTentativeChanges
 | otherwise = fail Old.oldRepoFailMsg

removeFromUnrevertContext :: forall rt p wR wU wT wX
                           . (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                          => Repository rt p wR wU wT
                          -> FL (PatchInfoAnd rt p) wX wT
                          -> IO ()
removeFromUnrevertContext r ps = do
  Sealed bundle <- unrevert_patch_bundle `catchall` return (seal (PatchSet NilRL NilRL))
  remove_from_unrevert_context_ bundle
  where unrevert_impossible =
            do confirmed <- promptYorn "This operation will make unrevert impossible!\nProceed?"
               if confirmed then removeFileMayNotExist (unrevertUrl r)
                            else fail "Cancelled."
        unrevert_patch_bundle :: IO (SealedPatchSet rt p Origin)
        unrevert_patch_bundle = do pf <- B.readFile (unrevertUrl r)
                                   case scanBundle pf of
                                     Right foo -> return foo
                                     Left err -> fail $ "Couldn't parse unrevert patch:\n" ++ err
        remove_from_unrevert_context_ :: PatchSet rt p Origin wZ -> IO ()
        remove_from_unrevert_context_ (PatchSet NilRL NilRL) = return ()
        remove_from_unrevert_context_ bundle =
         do debugMessage "Adjusting the context of the unrevert changes..."
            debugMessage $ "Removing "++ show (lengthFL ps) ++
                                  " patches in removeFromUnrevertContext!"
            ref <- readTentativeRepo r (repoLocation r)
            let withSinglet :: Sealed (FL ppp wXxx)
                            -> (forall wYyy . ppp wXxx wYyy -> IO ()) -> IO ()
                withSinglet (Sealed (x :>: NilFL)) j = j x
                withSinglet _ _ = return ()
            withSinglet (mergeThem ref bundle) $ \h_us ->
                  case commuteRL (reverseFL ps :> h_us) of
                    Nothing -> unrevert_impossible
                    Just (us' :> _) ->
                      case removeFromPatchSet ps ref of
                      Nothing -> unrevert_impossible
                      Just common ->
                          do debugMessage "Have now found the new context..."
                             bundle' <- makeBundleN Nothing common (hopefully us':>:NilFL)
                             writeDocBinFile (unrevertUrl r) bundle'
            debugMessage "Done adjusting the context of the unrevert changes!"

cleanRepository :: Repository rt p wR wU wT -> IO ()
cleanRepository r = cleanPristine r >> cleanInventories r >> cleanPatches r

-- | grab the pristine hash of _darcs/hashed_inventory, and retrieve the whole pristine tree,
--   possibly writing a clean working copy in the process.
createPristineDirectoryTree :: Repository rt p wR wU wT -> FilePath -> WithWorkingDir -> IO ()
createPristineDirectoryTree r reldir wwd
    | formatHas HashedInventory (repoFormat r) =
        do createDirectoryIfMissing True reldir
           withCurrentDirectory reldir $
              copyPristine (repoCache r) (repoLocation r) hashedInventoryPath wwd
    | otherwise = fail Old.oldRepoFailMsg

-- fp below really should be FileName
-- | Used by the commands dist and diff
createPartialsPristineDirectoryTree :: (FilePathLike fp)
                                    => Repository rt p wR wU wT
                                    -> [fp]
                                    -> FilePath
                                    -> IO ()
createPartialsPristineDirectoryTree r prefs dir
    | formatHas HashedInventory (repoFormat r) =
        do createDirectoryIfMissing True dir
           withCurrentDirectory dir $
            copyPartialsPristine (repoCache r) (repoLocation r)
              hashedInventoryPath prefs
    | otherwise = fail Old.oldRepoFailMsg

withRecorded :: Repository rt p wR wU wT
             -> ((AbsolutePath -> IO a) -> IO a)
             -> (AbsolutePath -> IO a)
             -> IO a
withRecorded repository mk_dir f
    = mk_dir $ \d -> do createPristineDirectoryTree repository (toFilePath d) WithWorkingDir
                        f d

withTentative :: forall rt p a wR wU wT.
                 Repository rt p wR wU wT
              -> ((AbsolutePath -> IO a) -> IO a)
              -> (AbsolutePath -> IO a)
              -> IO a
withTentative r mk_dir f
    | formatHas HashedInventory (repoFormat r) =
        mk_dir $ \d -> do copyPristine
                              (repoCache r)
                              (repoLocation r)
                              (darcsdir++"/tentative_pristine")
                              WithWorkingDir
                          f d
    | otherwise = fail Old.oldRepoFailMsg

-- | Writes out a fresh copy of the inventory that minimizes the
-- amount of inventory that need be downloaded when people pull from
-- the repository.
--
-- Specifically, it breaks up the inventory on the most recent tag.
-- This speeds up most commands when run remotely, because only a
-- smaller file (the most recent inventory) needs to be transferred.
-- It also guarantees that all the patches prior to a given tag are
-- included in that tag, so less commutation and history traversal is
-- needed.  This latter issue can become very important in large
-- repositories.
reorderInventory :: (IsRepoType rt, RepoPatch p, ApplyState p ~ Tree)
                 => Repository rt p wR wU wR
                 -> Compression
                 -> UpdateWorking
                 -> Verbosity
                 -> IO ()
reorderInventory repository compr uw verb = do
        debugMessage "Reordering the inventory."
        PatchSet _ ps <- misplacedPatches `fmap` readRepo repository
        tentativelyReplacePatches repository compr uw verb $ reverseRL ps
        finalizeTentativeChanges repository compr
        debugMessage "Done reordering the inventory."

-- | Returns the patches that make the most recent tag dirty.
misplacedPatches :: forall rt p wS wX . RepoPatch p
                 => PatchSet rt p wS wX
                 -> PatchSet rt p wS wX
misplacedPatches ps =
        -- Filter the repository, keeping only the tags, ordered from the
        -- most recent.
        case filter isTag $ mapRL info $ patchSet2RL ps of
                [] -> ps
                (lt:_) ->
                    -- Take the most recent tag, and split the repository into
                    -- the clean PatchSet "up to" the tag (ts) and an RL of
                    -- patches after the tag (r).
                    case splitOnTag lt ps of
                        Just (PatchSet ts xs :> r) -> PatchSet ts (xs+<+r)
                        _ -> impossible -- Because the tag is in ps.

-- @todo: we should not have to open the result of HashedRepo and
-- seal it.  Instead, update this function to work with type witnesses
-- by fixing DarcsRepo to match HashedRepo in the handling of
-- Repository state.
readRepo :: (IsRepoType rt, RepoPatch p)
         => Repository rt p wR wU wT
         -> IO (PatchSet rt p Origin wR)
readRepo r
    | formatHas HashedInventory (repoFormat r) = readRepoHashed r (repoLocation r)
    | otherwise = do Sealed ps <- Old.readOldRepo (repoLocation r)
                     return $ unsafeCoerceP ps

-- | XOR of all hashes of the patches' metadata.
-- It allows us to quickly check whether two repositories
-- have the same patches, independently of their order.
-- It relies on the assumption that the same patch cannot
-- be present twice in a repository.
-- This checksum is not cryptographically secure,
-- see http://robotics.stanford.edu/~xb/crypto06b/ .
repoXor :: (IsRepoType rt, RepoPatch p)
        => Repository rt p wR wU wR -> IO SHA1
repoXor repo = do
  hashes <- mapRL (makePatchname . info) . patchSet2RL <$> readRepo repo
  return $ foldl' sha1Xor sha1zero hashes
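
-- A sketch of comparing two repositories for patch-set equality regardless
-- of patch order (@repo1@ and @repo2@ are placeholder bindings):
--
-- > x1 <- repoXor repo1
-- > x2 <- repoXor repo2
-- > when (x1 == x2) $ putStrLn "the repositories contain the same patches"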