-- Copyright (C) 2006-2007 David Roundy
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

{-# OPTIONS_GHC -cpp -fglasgow-exts #-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}

#include "gadts.h"

module Darcs.Repository.HashedRepo ( revert_tentative_changes, finalize_tentative_changes,
                                     slurp_pristine, clean_pristine,
                                     copy_pristine, copy_partials_pristine,
                                     apply_to_tentative_pristine,
                                     add_to_tentative_inventory, remove_from_tentative_inventory,
                                     read_repo, read_tentative_repo, write_and_read_patch,
                                     write_tentative_inventory, copy_repo, slurp_all_but_darcs,
                                     readHashedPristineRoot, pris2inv
                                   ) where

import System.Directory ( doesFileExist, createDirectoryIfMissing )
import System.IO.Unsafe ( unsafeInterleaveIO )
import System.IO ( stderr, hPutStrLn )
import Data.List ( delete )
import Control.Monad ( unless )

import Workaround ( renameFile )
import Darcs.Flags ( DarcsFlag, Compression )
import Darcs.Patch.Set ( PatchSet, SealedPatchSet )
import Darcs.RepoPath ( FilePathLike, ioAbsoluteOrRemote, toPath )
import Darcs.Repository.Cache ( Cache, fetchFileUsingCache, speculateFileUsingCache,
                                writeFileUsingCache,
                                unionCaches, repo2cache, okayHash, takeHash,
                                HashedDir(..), hashedDir )
import Darcs.Repository.HashedIO ( applyHashed, slurpHashedPristine,
                                   copyHashed, copyPartialsHashed,
                                   clean_hashdir )
import Darcs.Repository.InternalTypes ( Repository(..), extractCache )
import Darcs.Hopefully ( PatchInfoAnd, patchInfoAndPatch, n2pia, info,
                         extractHash, createHashed )
import Darcs.SlurpDirectory ( Slurpy, empty_slurpy, slurp_remove, slurp )
import Darcs.Patch ( RepoPatch, Patchy, Named, showPatch, patch2patchinfo, readPatch )
import Darcs.Patch.Depends ( commute_to_end, slightly_optimize_patchset )
import Darcs.Patch.Info ( PatchInfo, showPatchInfo, human_friendly, readPatchInfo )
import Darcs.Witnesses.Ordered ( unsafeCoerceP, (:<)(..) )
import Darcs.Patch.FileName ( fp2fn )

import ByteStringUtils ( gzReadFilePS, dropSpace )
import qualified Data.ByteString as B (null, length, empty
                                      ,tail, take, drop, ByteString)
import qualified Data.ByteString.Char8 as BC (unpack, dropWhile, break, pack)

import Printer ( Doc, hcat, (<>), ($$), renderString, renderPS, text, invisiblePS )
import SHA1 ( sha1PS )
import Darcs.External ( copyFileOrUrl, cloneFile, fetchFilePS, Cachable( Uncachable ) )
import Darcs.Lock ( writeBinFile, writeDocBinFile, writeAtomicFilePS, appendBinFile, appendDocBinFile )
import Darcs.Utils ( withCurrentDirectory )
import Progress ( beginTedious, tediousSize, endTedious, debugMessage, finishedOneIO )
#include "impossible.h"
import Darcs.Witnesses.Ordered ( FL(..), RL(..),
                             mapRL, mapFL, lengthRL )
import Darcs.Witnesses.Sealed ( Sealed(..), seal, unseal )
import Darcs.Global ( darcsdir )

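-- Reset the tentative state: copy the recorded hashed_inventory over
-- tentative_hashed_inventory and point tentative_pristine at the recorded
-- pristine root.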
revert_tentative_changes :: IO ()
revert_tentative_changes =
    do cloneFile (darcsdir++"/hashed_inventory") (darcsdir++"/tentative_hashed_inventory")
       i <- gzReadFilePS (darcsdir++"/hashed_inventory")
       writeBinFile (darcsdir++"/tentative_pristine") $ "pristine:" ++ inv2pris i

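-- Promote the tentative state to the recorded state: rewrite (and thereby
-- optimize) the tentative inventory, splice in the tentative pristine root,
-- and rename the result to hashed_inventory.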
finalize_tentative_changes :: RepoPatch p => Repository p C(r u t) -> Compression -> IO ()
finalize_tentative_changes r compr =
    do let t = darcsdir++"/tentative_hashed_inventory"
       -- first let's optimize it...
       debugMessage "Optimizing the inventory..."
       ps <- read_tentative_repo r "."
       write_tentative_inventory (extractCache r) compr ps
       -- then we'll splice in the tentative pristine root,
       i <- gzReadFilePS t
       p <- gzReadFilePS $ darcsdir++"/tentative_pristine"
       writeDocBinFile t $ pris2inv (inv2pris p) i
       -- and rename it to its final value
       renameFile t $ darcsdir++"/hashed_inventory"
       -- note: in general we can't clean the pristine cache, because a
       -- simultaneous get might be in progress

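-- Read the pristine root hash from the recorded inventory, or return
-- Nothing if the inventory cannot be read.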
readHashedPristineRoot :: Repository p C(r u t) -> IO (Maybe String)
readHashedPristineRoot (Repo d _ _ _) =
    withCurrentDirectory d $ do
      i <- (Just `fmap` gzReadFilePS (darcsdir++"/hashed_inventory")) `catch` (\_ -> return Nothing)
      return $ inv2pris `fmap` i

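-- Remove from the hashed pristine directory everything that is not
-- reachable from the recorded pristine root.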
clean_pristine :: Repository p C(r u t) -> IO ()
clean_pristine r@(Repo d _ _ _) = withCurrentDirectory d $
   do -- we'll remove obsolete bits of our pristine cache
      debugMessage "Cleaning out the pristine cache..."
      i <- gzReadFilePS (darcsdir++"/hashed_inventory")
      clean_hashdir (extractCache r) HashedPristineDir [inv2pris i]

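-- Write the patch to the cache (unless it already has a hash) and append
-- its info and hash to the tentative inventory, returning the path of the
-- patch file.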
add_to_tentative_inventory :: RepoPatch p => Cache -> Compression -> PatchInfoAnd p C(x y) -> IO FilePath
add_to_tentative_inventory c compr p =
    do hash <- snd `fmap` write_patch_if_necesary c compr p
       appendDocBinFile (darcsdir++"/tentative_hashed_inventory") $ showPatchInfo $ info p
       appendBinFile (darcsdir++"/tentative_hashed_inventory") $ "\nhash: " ++ hash ++ "\n"
       return $ darcsdir++"/patches/" ++ hash

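-- Remove the given patches from the tentative inventory; any other patches
-- disturbed by the removal are taken out along with them and then re-added.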
remove_from_tentative_inventory :: RepoPatch p => Repository p C(r u t) -> Compression
                                -> FL (Named p) C(x t) -> IO ()
remove_from_tentative_inventory repo compr to_remove =
       -- FIXME: This algorithm should be *far* simpler.  All we need to do
       -- is remove the patches from a patchset and then write that
       -- patchset.  The commutation behavior of PatchInfoAnd should track
       -- which patches need to be rewritten for us.
    do allpatches <- read_tentative_repo repo "."
       skipped :< _ <- return $ commute_to_end to_remove allpatches
       okay <- simple_remove_from_tentative_inventory repo compr
               (mapFL patch2patchinfo to_remove ++ mapFL patch2patchinfo skipped)
       unless okay $ bug "bug in HashedRepo.remove_from_tentative_inventory"
       sequence_ $ mapFL (add_to_tentative_inventory (extractCache repo) compr . n2pia) skipped

simple_remove_from_tentative_inventory :: forall p C(r u t). RepoPatch p =>
                                          Repository p C(r u t) -> Compression -> [PatchInfo] -> IO Bool
simple_remove_from_tentative_inventory repo compr pis = do
    inv <- read_tentative_repo repo "."
    case cut_inv pis inv of
      Nothing -> return False
      Just (Sealed inv') -> do write_tentative_inventory (extractCache repo) compr inv'
                               return True
    where cut_inv :: [PatchInfo] -> PatchSet p C(x) -> Maybe (SealedPatchSet p)
          cut_inv [] x = Just $ seal x
          cut_inv x (NilRL:<:rs) = cut_inv x rs
          cut_inv xs ((hp:<:r):<:rs) | info hp `elem` xs = cut_inv (info hp `delete` xs) (r:<:rs)
          cut_inv _ _ = Nothing

writeHashFile :: Cache -> Compression -> HashedDir -> Doc -> IO String
writeHashFile c compr subdir d = do debugMessage $ "Writing hash file to "++(hashedDir subdir)
                                    writeFileUsingCache c compr subdir $ renderPS d

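-- Read the recorded patch set of the repository at the given (possibly
-- remote) path.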
read_repo :: RepoPatch p => Repository p C(r u t) -> String -> IO (PatchSet p C(r))
read_repo repo d = do
  realdir <- toPath `fmap` ioAbsoluteOrRemote d
  Sealed ps <- read_repo_private repo realdir "hashed_inventory" `catch`
                 (\e -> do hPutStrLn stderr ("Invalid repository:  " ++ realdir)
                           ioError e)
  return $ unsafeCoerceP ps

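-- Read the tentative patch set of the repository at the given (possibly
-- remote) path.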
read_tentative_repo :: RepoPatch p => Repository p C(r u t) -> String -> IO (PatchSet p C(t))
read_tentative_repo repo d = do
  realdir <- toPath `fmap` ioAbsoluteOrRemote d
  Sealed ps <- read_repo_private repo realdir "tentative_hashed_inventory" `catch`
                 (\e -> do hPutStrLn stderr ("Invalid repository:  " ++ realdir)
                           ioError e)
  return $ unsafeCoerceP ps

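-- Read a patch set by following the named inventory and the chain of parent
-- inventories it refers to; the patches themselves are fetched lazily
-- through the cache.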
read_repo_private :: RepoPatch p => Repository p C(r u t)
                  -> FilePath -> FilePath -> IO (SealedPatchSet p)
read_repo_private repo d iname =
 do inventories <- read_inventory_private repo (d++"/"++darcsdir) iname
    parseinvs inventories
    where read_patches :: RepoPatch p => [(PatchInfo, String)] -> IO (Sealed (RL (PatchInfoAnd p) C(x)))
          read_patches [] = return $ seal NilRL
          read_patches allis@((i1,h1):is1) =
              lift2Sealed (\p rest -> i1 `patchInfoAndPatch` p :<: rest)
                          (rp is1)
                          (createHashed h1 (const $ speculate h1 allis >> parse i1 h1))
              where rp :: RepoPatch p => [(PatchInfo, String)] -> IO (Sealed (RL (PatchInfoAnd p) C(x)))
                    rp [] = return $ seal NilRL
                    rp [(i,h),(il,hl)] =
                        lift2Sealed (\p rest -> i `patchInfoAndPatch` p :<: rest)
                                    (rp [(il,hl)])
                                    (createHashed h (const $ speculate h (reverse allis) >> parse i h))
                    rp ((i,h):is) = lift2Sealed (\p rest -> i `patchInfoAndPatch` p :<: rest)
                                                (rp is)
                                                (createHashed h (parse i))
          speculate :: String -> [(PatchInfo, String)] -> IO ()
          speculate h is = do already_got_one <- doesFileExist (d++"/"++darcsdir++"/patches/"++h)
                              unless already_got_one $
                                     mapM_ (speculateFileUsingCache (extractCache repo) HashedPatchesDir . snd) is
          parse :: Patchy p => PatchInfo -> String -> IO (Sealed (p C(x)))
          parse i h = do debugMessage ("Reading patch file: "++ show (human_friendly i))
                         (fn,ps) <- fetchFileUsingCache (extractCache repo) HashedPatchesDir h
                         case readPatch ps of
                           Just (p,_) -> return p
                           Nothing -> fail $ unlines ["Couldn't parse file "++fn,
                                                      "which is patch",
                                                      renderString $ human_friendly i]
          parseinvs :: RepoPatch p => [[(PatchInfo, String)]] -> IO (SealedPatchSet p)
          parseinvs [] = return $ seal NilRL
          parseinvs (i:is) = lift2Sealed (:<:) (parseinvs is) (read_patches i)
          lift2Sealed :: (FORALL(y z) q C(y z) -> p C(x y) -> r C(x z))
                      -> IO (Sealed (p C(x))) -> (FORALL(b) IO (Sealed (q C(b)))) -> IO (Sealed (r C(x)))
          lift2Sealed f iox ioy = do Sealed x <- unseal seal `fmap` unsafeInterleaveIO iox
                                     Sealed y <- unseal seal `fmap` unsafeInterleaveIO ioy
                                     return $ seal $ f y x

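-- Write a patch to the cache if necessary and return a copy of it that will
-- be lazily re-read from the cache when demanded.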
write_and_read_patch :: RepoPatch p => Cache -> Compression -> PatchInfoAnd p C(x y)
                     -> IO (PatchInfoAnd p C(x y))
write_and_read_patch c compr p = do (i,h) <- write_patch_if_necesary c compr p
                                    unsafeInterleaveIO $ readp h i
    where parse i h = do debugMessage ("Rereading patch file: "++ show (human_friendly i))
                         (fn,ps) <- fetchFileUsingCache c HashedPatchesDir h
                         case readPatch ps of
                           Just (x,_) -> return x
                           Nothing -> fail $ unlines ["Couldn't parse patch file "++fn,
                                                      "which is",
                                                      renderString $ human_friendly i]
          readp h i = do Sealed x <- createHashed h (parse i)
                         return $ patchInfoAndPatch i $ unsafeCoerceP x

write_tentative_inventory :: RepoPatch p => Cache -> Compression -> PatchSet p C(x) -> IO ()
write_tentative_inventory c compr = write_either_inventory c compr "tentative_hashed_inventory"

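-- Copy only the hashed_inventory of the source repository and record that
-- repository as a cache entry in prefs/sources; everything else can then be
-- fetched through the cache on demand.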
copy_repo :: RepoPatch p => Repository p C(r u t) -> [DarcsFlag] -> String -> IO ()
copy_repo repo@(Repo outr _ _ _) opts inr = do
    createDirectoryIfMissing False (outr++"/"++darcsdir++"/inventories")
    copyFileOrUrl opts (inr++"/"++darcsdir++"/hashed_inventory") (outr++"/"++darcsdir++"/hashed_inventory")
                  Uncachable -- no need to copy anything but hashed_inventory!
    appendBinFile (outr++"/"++darcsdir++"/prefs/sources") (show $ repo2cache inr `unionCaches` extractCache repo)
    debugMessage "Done copying hashed inventory."

write_either_inventory :: RepoPatch p => Cache -> Compression -> String -> PatchSet p C(x) -> IO ()
write_either_inventory c compr iname x =
    do createDirectoryIfMissing False $ darcsdir++"/inventories"
       let k = "Writing inventory"
       beginTedious k
       tediousSize k (lengthRL x)
       hsh <- write_inventory_private k c compr $ slightly_optimize_patchset x
       endTedious k
       case hsh of
         Nothing -> writeBinFile (darcsdir++"/"++iname) ""
         Just h -> gzReadFilePS (darcsdir++"/inventories/"++h) >>= writeAtomicFilePS (darcsdir++"/"++iname)

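-- Write each inventory of the patch set to the cache, linking them with
-- "Starting with inventory:" lines, and return the hash of the topmost
-- inventory (Nothing for an empty patch set).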
write_inventory_private :: RepoPatch p => String -> Cache -> Compression
                        -> PatchSet p C(x) -> IO (Maybe String)
write_inventory_private _ _ _ NilRL = return Nothing
write_inventory_private _ _ _ (NilRL:<:NilRL) = return Nothing
write_inventory_private _ _ _ (NilRL:<:_) = -- This shouldn't be possible, so best to check...
    bug "malformed PatchSet in HashedRepo.write_inventory_private"
write_inventory_private k c compr (x:<:xs) =
  do resthash <- write_inventory_private k c compr xs
     finishedOneIO k (case resthash of Nothing -> ""; Just h -> h)
     inventory <- sequence $ mapRL (write_patch_if_necesary c compr) x
     let inventorylist = hcat (map pihash $ reverse inventory)
         inventorycontents = case resthash of
                             Just lasthash -> text ("Starting with inventory:\n"++lasthash) $$
                                              inventorylist
                             _ -> inventorylist
     hash <- writeHashFile c compr HashedInventoriesDir inventorycontents
     return $ Just hash

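-- Return the info and hash of a patch, writing the patch to the cache only
-- if it does not already have a hash.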
write_patch_if_necesary :: RepoPatch p => Cache -> Compression
                        -> PatchInfoAnd p C(x y) -> IO (PatchInfo, String)
write_patch_if_necesary c compr hp =
    seq infohp $ case extractHash hp of
                   Right h -> return (infohp, h)
                   Left p -> (\h -> (infohp, h)) `fmap`
                             writeHashFile c compr HashedPatchesDir (showPatch p)
    where infohp = info hp

pihash :: (PatchInfo,String) -> Doc
pihash (pinf,hash) = showPatchInfo pinf $$ text ("hash: " ++ hash ++ "\n")

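-- Read the named inventory file, following any "Starting with inventory:"
-- line to the chain of parent inventories, and return the (PatchInfo, hash)
-- pairs listed in each inventory.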
read_inventory_private :: Repository p C(r u t) -> String -> String
                       -> IO [[(PatchInfo, String)]]
read_inventory_private repo d iname = do
    i <- skip_pristine `fmap` fetchFilePS (d++"/"++iname) Uncachable
    (rest,str) <- case BC.break ((==)'\n') i of
                  (swt,pistr) | swt == BC.pack "Starting with inventory:" ->
                    case BC.break ((==)'\n') $ B.tail pistr of
                    (h,thisinv) | okayHash $ BC.unpack h ->
                      do r <- unsafeInterleaveIO $ read_inventories
                              (extractCache repo) (BC.unpack h) -- don't unpack twice!
                         return (r,thisinv)
                    _ -> fail $ "Bad hash in " ++ d ++ "/"++darcsdir++"/" ++ iname
                  _ -> return ([],i)
    return $ reverse (read_patch_ids str) : rest

read_inventories :: Cache -> String -> IO [[(PatchInfo, String)]]
read_inventories cache ihash = do
    (fn,i_and_p) <- fetchFileUsingCache cache HashedInventoriesDir ihash
    let i = skip_pristine i_and_p
    (rest,str) <- case BC.break ((==)'\n') i of
                  (swt,pistr) | swt == BC.pack "Starting with inventory:" ->
                    case BC.break ((==)'\n') $ B.tail pistr of
                    (h,thisinv) | okayHash $ BC.unpack h ->
                      do r <- unsafeInterleaveIO $
                              read_inventories cache (BC.unpack h) -- again, don't unpack twice
                         return (r,thisinv)
                    _ -> fail $ "Bad hash in file " ++ fn
                  _ -> return ([],i)
    return $ reverse (read_patch_ids str) : rest

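-- Parse an inventory body into its (PatchInfo, hash) pairs, stopping at the
-- first entry that fails to parse.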
read_patch_ids :: B.ByteString -> [(PatchInfo, String)]
read_patch_ids inv | B.null inv = []
read_patch_ids inv = case readPatchInfo inv of
                     Nothing -> []
                     Just (pinfo,r) ->
                         case readHash r of
                         Nothing -> []
                         Just (h,r') -> (pinfo,h) : read_patch_ids r'

readHash :: B.ByteString -> Maybe (String, B.ByteString)
readHash s = let s' = dropSpace s
                 (l,r) = BC.break ((==)'\n') s'
                 (kw,h) = BC.break ((==)' ') l
             in if kw /= BC.pack "hash:" || B.length h <= 1
                then Nothing
                else Just (BC.unpack $ B.tail h,r)

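-- Apply a patch to the pristine tree recorded in the given inventory file
-- and write the new pristine root back into that file.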
apply_pristine :: Patchy q => Cache -> [DarcsFlag] -> String -> String -> q C(x y) -> IO ()
apply_pristine c opts d iname p =
    do i <- gzReadFilePS (d++"/"++iname)
       h <- applyHashed c opts (inv2pris i) p
       writeDocBinFile (d++"/"++iname) $ pris2inv h i

apply_to_tentative_pristine :: Patchy q => Cache -> [DarcsFlag] -> q C(x y) -> IO ()
apply_to_tentative_pristine c opts p = apply_pristine c opts "." (darcsdir++"/tentative_pristine") p

slurp_pristine :: Cache -> Compression -> String -> String -> IO Slurpy
slurp_pristine c compr d iname = do
    i <- fetchFilePS (d++"/"++iname) Uncachable
    slurp_pristine_private c compr i

slurp_pristine_private :: Cache -> Compression -> B.ByteString -> IO Slurpy
slurp_pristine_private c compr inv = case inv2pris inv of
                                    h | h == sha1PS B.empty -> return empty_slurpy
                                      | otherwise -> slurpHashedPristine c compr h

copy_pristine :: Cache -> Compression -> String -> String -> IO ()
copy_pristine c compr d iname = do
    i <- fetchFilePS (d++"/"++iname) Uncachable
    debugMessage $ "Copying hashed pristine tree: "++inv2pris i
    let k = "Copying pristine"
    beginTedious k
    copyHashed k c compr $ inv2pris i
    endTedious k

copy_partials_pristine :: FilePathLike fp =>
                          Cache -> Compression -> String -> String -> [fp] -> IO ()
copy_partials_pristine c compr d iname fps =
  do i <- fetchFilePS (d++"/"++iname) Uncachable
     copyPartialsHashed c compr (inv2pris i) fps

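-- Extract the pristine root hash from an inventory; an inventory with no
-- "pristine:" line maps to the hash of an empty ByteString.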
inv2pris :: B.ByteString -> String
inv2pris inv | B.take pristine_name_length inv == pristine_name =
                 case takeHash $ B.drop pristine_name_length inv of
                 Just (h,_) -> h
                 Nothing -> error "Bad hash in inventory!"
             | otherwise = sha1PS B.empty

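-- Build an inventory whose "pristine:" line carries the given hash,
-- replacing any pristine line already present in the old inventory.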
pris2inv :: String -> B.ByteString -> Doc
pris2inv h inv = invisiblePS pristine_name <> text h $$ invisiblePS (skip_pristine inv)

pristine_name :: B.ByteString
pristine_name = BC.pack "pristine:"

skip_pristine :: B.ByteString -> B.ByteString
skip_pristine ps
    | B.take pristine_name_length ps == pristine_name = B.drop 1 $ BC.dropWhile (/= '\n') $
                                                        B.drop pristine_name_length ps
    | otherwise = ps

pristine_name_length :: Int
pristine_name_length = B.length pristine_name

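-- Slurp a directory tree, dropping the _darcs directory from the result if
-- it is present.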
slurp_all_but_darcs :: FilePath -> IO Slurpy
slurp_all_but_darcs d = do s <- slurp d
                           case slurp_remove (fp2fn $ "./"++darcsdir) s of
                             Nothing -> return s
                             Just s' -> return s'